Skip to content

Commit

Permalink
Adaptations to work on older compilers.
Browse files Browse the repository at this point in the history
  - Extends yaFyaml (main) functionality back to
    GFortran 8.2 and Intel 17.0.4.
  - Full functionality is provided in
    GFortran 9.3, Intel 19.0.5, and NAG 7.0 (7015)
  • Loading branch information
tclune committed Apr 16, 2020
1 parent 8fa8d9f commit 1882d2f
Show file tree
Hide file tree
Showing 6 changed files with 50 additions and 32 deletions.
2 changes: 1 addition & 1 deletion src/ArrayWrapper.F90
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ function new_ArrayWrapper(array) result(wrapper)
#ifdef __GFORTRAN__
allocate(wrapper%elements, source=array)
#else
wrapper%elements = array
allocate(wrapper%elements, source=array)
#endif

end function new_ArrayWrapper
Expand Down
2 changes: 1 addition & 1 deletion src/Configuration.F90
Original file line number Diff line number Diff line change
Expand Up @@ -1125,7 +1125,7 @@ function get(this) result(config)
q%node => this%vector_iter%get()
end select
else
config%node = this%scalar_iter%node
allocate(config%node, source=this%scalar_iter%node)
end if

end function get
Expand Down
28 changes: 15 additions & 13 deletions src/Lexer.F90
Original file line number Diff line number Diff line change
Expand Up @@ -162,12 +162,14 @@ function get_token(this, unused, rc) result(token)
do
need_more = this%need_more_tokens(rc=status)
if (status /= SUCCESS) then
token = NullToken()
allocate(token, source=NullToken())
__RETURN__(status)
end if
if (need_more) then
call this%lex_tokens(rc=status)
if (status /= SUCCESS) token = NullToken()
if (status /= SUCCESS) then
allocate(token, source=NullToken())
end if
__VERIFY__(status)
else
exit
Expand All @@ -176,9 +178,9 @@ function get_token(this, unused, rc) result(token)

if (this%processed_tokens%size() >= 0) then
this%num_tokens_given = this%num_tokens_given + 1
token = this%pop_token()
call this%pop_token(token)
else
token = NullToken()
allocate(token, source=NullToken())
end if

__RETURN__(SUCCESS)
Expand Down Expand Up @@ -304,16 +306,16 @@ end subroutine save_simple_key

! Have not implemented stack in gFTL, so
! vector will have to suffice
!> Pop the oldest token off the processed-token vector (FIFO: takes the
!> element at index 1, then erases it).
!>
!> Converted from a function returning a polymorphic allocatable to a
!> subroutine with an intent(out) argument, and from intrinsic assignment
!> to sourced allocation — per the commit notes, polymorphic assignment is
!> not supported by the older compilers targeted here (GFortran 8.2,
!> Intel 17.0.4).
subroutine pop_token(this, token)
   class(Lexer), intent(inout) :: this
   !> Receives the popped token; allocated via source= from the stored element.
   class(AbstractToken), allocatable, intent(out) :: token

   associate (tokens => this%processed_tokens)
      ! Sourced allocation instead of "token = tokens%at(1)" (see note above).
      allocate(token, source=tokens%at(1))
      call tokens%erase(tokens%begin())
   end associate

end subroutine pop_token


! All the different cases ...
Expand Down Expand Up @@ -805,15 +807,15 @@ subroutine process_quoted_scalar(this, style, unused, rc)

call this%save_simple_key()
this%allow_simple_key = .false.
token = this%scan_flow_scalar(style, __RC__)
call this%scan_flow_scalar(token, style, __RC__)
call this%processed_tokens%push_back(token)
__RETURN__(SUCCESS)
end subroutine process_quoted_scalar


function scan_flow_scalar(this, style, unused, rc) result(token)
class(AbstractToken), allocatable :: token
subroutine scan_flow_scalar(this, token, style, unused, rc)
class(Lexer), intent(inout) :: this
class(AbstractToken), allocatable, intent(out) :: token
character, intent(in) :: style
class(KeywordEnforcer), optional, intent(in) :: unused
integer, optional, intent(out) :: rc
Expand All @@ -830,12 +832,12 @@ function scan_flow_scalar(this, style, unused, rc) result(token)
end do
call this%forward()

token = ScalarToken(chunks,is_plain=.false.,style=style)
allocate(token, source=ScalarToken(chunks,is_plain=.false.,style=style))

__RETURN__(status)


end function scan_flow_scalar
end subroutine scan_flow_scalar


function scan_flow_scalar_spaces(this, style, unused, rc) result(text)
Expand Down
41 changes: 25 additions & 16 deletions src/Parser.F90
Original file line number Diff line number Diff line change
Expand Up @@ -54,20 +54,24 @@ function new_Parser_schema(schema) result(p)
type(Parser) :: p
class(AbstractSchema), intent(in) :: schema

p%schema = schema
allocate(p%schema,source=schema)
end function new_Parser_schema

function new_Parser_schema_name(schema_name) result(p)
type(Parser) :: p
character(*), intent(in) :: schema_name

type(JSONSchema) :: json
type(CoreSchema) :: core
type(FailsafeSchema) :: failsafe


select case (schema_name)
case ('json','JSON')
p = Parser(JSONSchema())
p = Parser(json)
case ('core','Core')
p = Parser(CoreSchema())
p = Parser(core)
case ('failsafe','Failsafe')
p = Parser(FailsafeSchema())
p = Parser(failsafe)
case default
error stop "Unknown schema"
end select
Expand Down Expand Up @@ -100,7 +104,7 @@ subroutine top(this, cfg, lexr)

done = .false.
do
token = lexr%get_token()
allocate(token, source=lexr%get_token())
select type (token)
type is (StreamStartToken)
type is (StreamEndToken)
Expand Down Expand Up @@ -143,6 +147,7 @@ subroutine top(this, cfg, lexr)
class default
error stop 'unsupported token type in top'
end select
deallocate(token)
end do

end subroutine top
Expand All @@ -162,7 +167,7 @@ recursive subroutine process_sequence(this, node, lexr)
select type (node)
type is (UnlimitedVector)
do
token = lexr%get_token()
allocate(token,source = lexr%get_token())


select type (token)
Expand Down Expand Up @@ -228,26 +233,28 @@ recursive subroutine process_mapping(this, node, lexr)
select type (q => node)
type is (OrderedStringUnlimitedMap)
do
token = lexr%get_token()
allocate(token, source = lexr%get_token())
select type (token)
type is (ScalarToken)

type is (KeyToken)
next_token = lexr%get_token()
allocate(next_token, source=lexr%get_token())
select type(next_token)
type is (ScalarToken)
key = next_token%value ! always a string
class default
error stop
end select
next_token = lexr%get_token()
deallocate(next_token)
allocate(next_token, source = lexr%get_token())
select type(next_token)
type is (ValueToken)
! mandatory before value
class default
error stop
end select
next_token = lexr%get_token()
deallocate(next_token)
allocate(next_token, source = lexr%get_token())
select type(next_token)
type is (ScalarToken)
call q%insert(key, this%interpret(next_token))
Expand All @@ -266,6 +273,7 @@ recursive subroutine process_mapping(this, node, lexr)
class default
error stop
end select
deallocate(next_token)

type is (FlowNextEntryToken)
expect_another = .true.
Expand All @@ -278,6 +286,7 @@ recursive subroutine process_mapping(this, node, lexr)
error stop 'illegal token encountered B'
end select

deallocate(token)
end do

class default
Expand All @@ -301,16 +310,16 @@ function interpret(this, scalar) result(value)
text = scalar%value

if (this%schema%matches_null(text)) then
value = None
allocate(value, source=none)
elseif (this%schema%matches_logical(text)) then
value = this%schema%to_logical(text)
allocate(value, source = this%schema%to_logical(text))
elseif (this%schema%matches_integer(text)) then
value = this%schema%to_integer(text)
allocate(value, source = this%schema%to_integer(text))
elseif(this%schema%matches_real(text)) then
value = this%schema%to_real(text)
allocate(value, source = this%schema%to_real(text))
else
! anything else is a string (workaround for gFortran)
value = String(text)
allocate(value, source = String(text))
end if

end function interpret
Expand Down
2 changes: 1 addition & 1 deletion src/Reader.F90
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ module fy_Reader
!> Construct a Reader wrapping the given text stream, with empty buffers.
!>
!> Uses sourced allocation rather than polymorphic intrinsic assignment
!> ("r%stream = stream") — per the commit notes, the latter is not
!> supported by the older compilers targeted here (GFortran 8.2,
!> Intel 17.0.4). Assumes r%stream is a polymorphic allocatable
!> component — TODO confirm against the Reader type definition.
function new_Reader(stream) result(r)
   type(Reader) :: r
   class(AbstractTextStream), intent(in) :: stream

   allocate(r%stream, source=stream)
   r%raw_buffer = ''
   r%buffer = ''
end function new_Reader
Expand Down
7 changes: 7 additions & 0 deletions tests/Test_Configuration_dump.pf
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,22 @@ contains
subroutine test_dump_scalar_logical()
type(Configuration) :: cfg
character(100) :: buffer
!!$ character(:), allocatable :: str

cfg = Configuration(.true.)
!!$ call cfg%to_json(str)
!!$ buffer = str

write(buffer,"(DT)") cfg
#ifdef __GFORTRAN__
#else
@assert_that(trim(buffer),is(equal_to('true')))
#endif

cfg = Configuration(.false.)
!!$ call cfg%to_json(str)
!!$ buffer = str
print*,__FILE__,__LINE__
write(buffer,'(DT)') cfg
@assert_that(trim(buffer),is(equal_to('false')))

Expand Down

0 comments on commit 1882d2f

Please sign in to comment.