mirror of https://github.com/OpenTTD/OpenTTD.git, synced 2025-08-13 17:49:10 +00:00

Compare commits: a0b2f28f9c ... 1.10.3 (105 commits)
SHA1
baf5bf29fa  2a083e6c0d  d9702c9d20  f8bbd8dea0  77bee1b941
4d7d0f0015  f712c235e9  dbb58fa5de  7b05f8e741  e3ee7913d4
d8c4a58448  cf943d3a74  096cbf2dbb  3cfb8524f0  a9aa636b40
cb5a1300ac  536184e21f  97504e7c29  24960b4641  482974b9fe
4b1070cf75  b827e2415b  c10571d7ce  d662e8ca96  762f5d258a
b7fa118069  211735de3a  430ce20f8d  20007fd1f4  02980119e4
dd4aae830d  f4ed770cff  89c8215b79  911f9165cf  2748a90bc9
583a2221ca  86113008ab  31a9f549fb  8203adecb5  3d8e68f966
aca5d97a68  fd8ca95947  9617fa727d  74b591c2e9  78e558717c
cb9c4bf4a0  e39c5829a2  a089c876ab  1072c74bc4  bb251f45fc
21aa339901  3132d29805  94581d352a  cf27deb675  40d68273aa
2cc244bde0  7872b1e0b5  22519b3b0d  5b2447e10c  e0680c9ede
ea895f05eb  37187df7ef  2faa89a98e  4297cc5f21  591ca82845
e90322f6e9  978cc774ec  7f77b8c1e2  2bf936bbbc  eb8d79f41f
0737458ec4  7e659bc3da  612c912144  c8cd5f7f3a  0c87ae1875
cc458c9559  9f0371117b  b6d409f8a7  d0613bad11  27cbee76b4
7e3eabdaae  a4d73d74a2  d4bd17d443  a82dca883f  18ad0fa983
fd64c62bcd  b542ebfb52  318b51c58a  df7f642a02  a0dc9ef847
8b4501604c  4082121511  40b3bc6f1a  b5a8510b9b  aa7dbdc42e
2f57d65575  ceae49a1d8  dc2f10bf11  0643a3627f  fd106cf67b
3d5b729290  81ed69d8d4  bd3ba91181  49d2a07f66  887dce4655

@@ -8,9 +8,8 @@ notifications:
only:
  - master
only-by:
  - eints-sync[bot]
  - DorpsGek
commit-comment:
discussion:
pull-request:
issue:
tag-created:

468  .editorconfig
@@ -7,471 +7,3 @@ trim_trailing_whitespace = true
[*.{c,cpp,h,hpp}]
indent_style = tab
charset = utf-8
#indent_size = <integer>
#tab_width = <integer>
#end_of_line = <lf, cr, crlf>

### C++ specific editor config rules ###
## Visual Studio ## https://learn.microsoft.com/en-us/visualstudio/ide/cpp-editorconfig-properties?view=vs-2019 ##
# Indentation #
# Do not indent braces
cpp_indent_braces = false
# When a new line is typed, it's indented relative to the innermost open parenthesis
cpp_indent_multi_line_relative_to = innermost_parenthesis
# Indent new lines within parenthesis
cpp_indent_within_parentheses = indent
# In existing code, do not use the setting for alignment of new lines within parentheses
cpp_indent_preserve_within_parentheses = true
# Indent case contents
cpp_indent_case_contents = true
# Indent case labels
cpp_indent_case_labels = true
# Do not indent braces following a case statement
cpp_indent_case_contents_when_block = false
# Do not indent braces of lambdas used as parameters
cpp_indent_lambda_braces_when_parameter = false
# No override for "Position of goto labels" (Unspecified)
#cpp_indent_goto_labels
# Move preprocessor directives to the leftmost column
cpp_indent_preprocessor = leftmost_column
# Do not indent access specifiers
cpp_indent_access_specifiers = false
# Indent namespace contents
cpp_indent_namespace_contents = true
# Preserve indentation of comments
cpp_indent_preserve_comments = true

# Newline #
# Keep open braces for namespaces on the same line, but add a space before the brace
cpp_new_line_before_open_brace_namespace = same_line
# Keep open braces for types/classes on the same line, but add a space before the brace
cpp_new_line_before_open_brace_type = same_line
# Move open braces for functions to a new line
cpp_new_line_before_open_brace_function = new_line
# Keep open braces for control blocks on the same line, but add a space before the brace
cpp_new_line_before_open_brace_block = same_line
# Keep open braces for lambdas on the same line, but add a space before the brace
cpp_new_line_before_open_brace_lambda = same_line
# Place scope braces on separate lines
cpp_new_line_scope_braces_on_separate_lines = true
# Do not move closing braces to the same line as opening braces for empty types
cpp_new_line_close_brace_same_line_empty_type = false
# Do not move closing braces to the same line as opening braces for empty function bodies
cpp_new_line_close_brace_same_line_empty_function = false
# Do not place 'catch' and similar keywords on a new line
cpp_new_line_before_catch = false
# Do not place 'else' on a new line
cpp_new_line_before_else = false
# Do not place 'while' in a do-while loop on a new line
cpp_new_line_before_while_in_do_while = false

# Spacing #
# Remove spaces between function names and opening parentheses of argument lists
cpp_space_before_function_open_parenthesis = remove
# Do not insert a space within parentheses of an argument list
cpp_space_within_parameter_list_parentheses = false
# Do not insert a space between parentheses when argument list is empty
cpp_space_between_empty_parameter_list_parentheses = false
# Insert space between keyword and opening parenthesis in control flow statements
cpp_space_after_keywords_in_control_flow_statements = true
# Do not insert a space within parentheses of a control statement
cpp_space_within_control_flow_statement_parentheses = false
# Do not insert a space before opening parenthesis of lambda argument lists
cpp_space_before_lambda_open_parenthesis = false
# Do not insert a space within parentheses of a C-style cast
cpp_space_within_cast_parentheses = false
# Do not insert a space after closing parenthesis of C-style cast
cpp_space_after_cast_close_parenthesis = false
# Do not insert a space within parentheses of a parenthesized expression
cpp_space_within_expression_parentheses = false
# Insert space before opening brace of blocks
cpp_space_before_block_open_brace = true
# Do not insert a space between empty braces
cpp_space_between_empty_braces = false
# Do not insert a space before opening brace of uniform initialization and initializer lists
cpp_space_before_initializer_list_open_brace = false
# No override for "Insert space within braces of uniform initialization and initializer lists" (Spacing for this varies)
#cpp_space_within_initializer_list_braces
# Preserve spaces inside uniform initialization and initializer lists
cpp_space_preserve_in_initializer_list = true
# Do not insert space before opening square brackets
cpp_space_before_open_square_bracket = false
# Do not insert space within square bracket
cpp_space_within_square_brackets = false
# Do not insert space before empty square brackets
cpp_space_before_empty_square_brackets = false
# Do not insert space between empty square brackets
cpp_space_between_empty_square_brackets = false
# Group square brackets for multi-dimensional arrays together
cpp_space_group_square_brackets = true
# Do not insert space within square brackets for lambdas
cpp_space_within_lambda_brackets = false
# Do not insert space between empty lambda brackets
cpp_space_between_empty_lambda_brackets = false
# Do not insert space before commas
cpp_space_before_comma = false
# Insert space after commas
cpp_space_after_comma = true
# Remove spaces before and after member operators
cpp_space_remove_around_member_operators = true
# Insert space before colon for base in type declarations
cpp_space_before_inheritance_colon = true
# Insert space before colon for constructors
cpp_space_before_constructor_colon = true
# Remove space before semicolons
cpp_space_remove_before_semicolon = true
# No override for "Insert space after semicolons" (Depends on if loop is unconditional)
#cpp_space_after_semicolon
# Remove spaces between unary operators and their operands
cpp_space_remove_around_unary_operator = true
# Insert spaces before and after binary operators
cpp_space_around_binary_operator = insert
# Insert spaces around assignment operators
cpp_space_around_assignment_operator = insert
# Align pointer/reference symbol to the right
cpp_space_pointer_reference_alignment = right
# Insert spaces around conditional operators
cpp_space_around_ternary_operator = insert

# Wrapping #
# Always apply New Lines settings for blocks
cpp_wrap_preserve_blocks = never



## Rider & ReSharper ## https://www.jetbrains.com/help/resharper/EditorConfig_CPP_CppBlankLinesPageScheme.html ##
# Blank Lines #
# No override for "Max blank lines in declarations" (Unspecified; Not consistent across source code)
#cpp_keep_blank_lines_in_declarations
# No override for "Max blank lines in rest of source code" (Unspecified; Not consistent across source code)
#cpp_keep_blank_lines_in_code
# No override for "Number of blank lines around class/struct/enum definition" (Unspecified; Not consistent across source code)
#cpp_blank_lines_around_class_definition
# No override for "Number of blank lines around function declarations" (Unspecified; Not consistent across source code)
#cpp_blank_lines_around_function_declaration
# No override for "Number of blank lines around function definitions" (Unspecified; Not consistent across source code)
#cpp_blank_lines_around_function_definition
# No override for "Number of blank lines around single line function definitions" (Unspecified; Not consistent across source code)
#cpp_blank_lines_around_single_line_function_definition
# No override for "Number of blank lines around namespaces" (Unspecified; Not consistent across source code)
#cpp_blank_lines_around_namespace
# No override for "Number of blank lines around other definitions and declarations" (Unspecified; Not consistent across source code)
#cpp_blank_lines_around_other_declaration

# Braces Layout #
# Keep open braces for namespace declarations on the same line, but add a space before the brace
cpp_namespace_declaration_braces = end_of_line
# Keep open braces for linkage declarations on the same line, but add a space before the brace
cpp_linkage_specification_braces = end_of_line
# Keep open braces for types/classes on the same line, but add a space before the brace
cpp_type_declaration_braces = end_of_line
# Keep open braces for namespace definitions on the same line (does not modify spacing before the brace)
cpp_place_namespace_definitions_on_same_line = true
# Move open braces for functions to a new line
cpp_invocable_declaration_braces = next_line
# Keep open braces for lambdas on the same line, but add a space before the brace
cpp_anonymous_method_declaration_braces = end_of_line
# Keep open braces for case blocks on the same line, but add a space before the brace
cpp_case_block_braces = end_of_line
# No override for "Requires expression braces" style (requires expressions are a C++20 feature)
#cpp_requires_expression_braces
# Keep open braces for all other blocks on the same line, but add a space before the brace
cpp_other_braces = end_of_line
# Only indent the insides of multi-line expression braces
cpp_expression_braces = inside
# Place the braces of empty blocks together and on the same line
cpp_empty_block_style = together_same_line
# Force line breaks within simple compound statements
cpp_simple_block_style = line_break
# No override for "Regular expression for macros starting a block" (Macro blocks aren't used)
#cpp_macro_block_begin
# No override for "Regular expression for macros ending a block" (Macro blocks aren't used)
#cpp_macro_block_end

# Tabs and indents #
# Redundant override (Already overridden globally by 'indent_style')
#cpp_indent_style
# Redundant override (Already overridden globally by 'indent_size')
#cpp_indent_size
# Redundant override (Already overridden globally by 'tab_width')
#cpp_tab_width
# Use spaces instead of tabs as indentation for precise alignment
cpp_alignment_tab_fill_style = use_spaces
# Allow alignment even if construct is located too far to the right, more than 2/3 of 'Hard wrap at' limit
cpp_allow_far_alignment = true

# Indentation and Alignment #
# No override for "Continuous line indent" (Varies throughout source code)
#cpp_continuous_line_indent
# Do not use continuous line indent in function declaration and invocation parentheses
cpp_use_continuous_line_indent_in_method_pars = false
# Do not use continuous line indent in initializer lists
cpp_use_continuous_line_indent_in_expression_braces = false
# Indent namespace members (including nested ones)
cpp_namespace_indentation = all
# No override for "Indent linkage specification block members" (Unspecified)
#cpp_linkage_specification_indentation
# Do not indent access specifier from class
cpp_indent_access_specifiers_from_class = false
# Indent class member from access specifier
cpp_indent_class_members_from_access_specifiers = true
# Do not indent if a function definition or declaration is wrapped after the type
cpp_indent_wrapped_function_names = false
# Indent 'case' labels from 'switch'
cpp_indent_switch_labels = true
# No override for "Indent function declarations' parentheses" (Varies throughout source code)
#cpp_indent_method_decl_pars
# No override for "Indent method calls' parentheses" (Varies throughout source code)
#cpp_indent_invocation_pars
# No override for "Indent statement (if, while, for, etc) parentheses" (Varies throughout source code)
#cpp_indent_statement_pars
# Do not change preprocessor directives indenting
cpp_indent_preprocessor_directives = do_not_change
# No override for "Indent C++/CLI generic constraints" (C++/CLI is not used)
#cpp_indent_type_constraints
# Align/indent comments started at the first column
cpp_indent_comment = true
# Comments that comment out code will use the indentation level of the commented code.
cpp_place_comments_at_first_column = false
# Align multiline declarators in declaration
cpp_align_multiple_declaration = true
# Align multiline function parameters
cpp_align_multiline_parameter = true
# Align multiline call arguments
cpp_align_multiline_argument = true
# Do not align first of multiline call arguments with the opening parentheses
cpp_align_first_arg_by_paren = false
# Align multiline initializer list arguments
cpp_align_multiline_expression_brace = true
# No override for "Align multiline template parameters in template declaration" (Unspecified)
#cpp_align_multiline_type_parameter
# No override for "Align multiline template arguments" (Unspecified)
#cpp_align_multiline_type_argument
# Align multiline base classes in class base clause
cpp_align_multiline_extends_list = true
# Align multiline member initializers in member initializer lists
cpp_align_multiline_ctor_init = true
# Outdent commas placed on new line
cpp_outdent_commas = true
# Do not align multiline ?: operator with first line (since alignment is incorrect)
cpp_align_ternary = none
# Do not indent aligned ?: operator (since indentation varies)
cpp_indent_aligned_ternary = false
# No override for "Align multiline chained method calls" (Unspecified)
#cpp_align_multiline_calls_chain
# No override for "Outdent '.' and '->' in chained method calls on new lines" (Unspecified)
#cpp_outdent_dots
# Do not align multiline chained binary expressions
cpp_align_multiline_binary_expressions_chain = false
# Fix column alignment in adjacent lines
cpp_int_align_fix_in_adjacent = true
# Align assignments with adjacent assignments
cpp_int_align_eq = true
# Do not align declaration names with adjacent declaration names
cpp_int_align_declaration_names = false
# Align end-of-line comments with adjacent end-of-line comments
cpp_int_align_comments = true

# Spaces #
# Do not put space before ANY commas
cpp_space_before_comma = false
# Put space after ALL commas
cpp_space_after_comma = true
# Put space before ptr in declaration of variable
cpp_space_before_ptr_in_data_member = true
# Do not put space after ptr in declaration of variable
cpp_space_after_ptr_in_data_member = false
# Put space before ptr in declaration of multiple variables
cpp_space_before_ptr_in_data_members = true
# Do not put space after ptr in declaration of multiple variables
cpp_space_after_ptr_in_data_members = false
# Put space before ptr in return type of function
cpp_space_before_ptr_in_method = true
# Do not put space after ptr in return type of function
cpp_space_after_ptr_in_method = false
# Do not put space before ptr in abstract declaration
cpp_space_before_ptr_in_abstract_decl = false
# Put space before ref in declaration of variable
cpp_space_before_ref_in_data_member = true
# Do not put space after ref in declaration of variable
cpp_space_after_ref_in_data_member = false
# Put space before ref in declaration of multiple variables
cpp_space_before_ref_in_data_members = true
# Do not put space after ref in declaration of multiple variables
cpp_space_after_ref_in_data_members = false
# Do not put space before ref in return type of function
cpp_space_before_ref_in_method = false
# Put space after ref in return type of function
cpp_space_after_ref_in_method = true
# Do not put space before ref in abstract declaration
cpp_space_before_ref_in_abstract_decl = false
# Do not put space before parentheses in function parameters
cpp_space_between_method_declaration_name_and_open_parenthesis = false
# Do not put space before parentheses in lambda parameters
cpp_space_before_lambda_parentheses = false
# Do not put space within parentheses in function parameters
cpp_space_between_method_declaration_parameter_list_parentheses = false
# Do not put space within empty parentheses in function parameters
cpp_space_between_method_declaration_empty_parameter_list_parentheses = false
# Do not put space before angle brackets in template parameters
cpp_space_before_template_params = false
# Do not put space within angle brackets in template parameters
cpp_space_within_template_params = false
# Do not put space within empty angle brackets in template parameters
cpp_space_within_empty_template_params = false
# Do not put space before angle brackets in template arguments
cpp_space_before_template_args = false
# Do not put space within angle brackets in template arguments
cpp_space_within_template_args = false
# Do not put space between closing angle brackets in template arguments
cpp_space_between_closing_angle_brackets_in_template_args = false
# Put space around '=' in alias declaration and namespace alias
cpp_space_around_alias_eq = true
# Do not put space around '->' in trailing return types
cpp_space_around_deref_in_trailing_return_type = false
# Put space before base types list colon
cpp_space_before_colon_in_inheritance_clause = true
# Put space after base types list colon
cpp_space_after_colon_in_inheritance_clause = true
# No override for "Before C++/CLI generic constraint colon" (Unspecified)
#cpp_space_before_type_parameter_constraint_colon
# No override for "After C++/CLI generic constraint colon" (Unspecified)
#cpp_space_after_type_parameter_constraint_colon
# Put space before parentheses of control statements
cpp_space_after_keywords_in_control_flow_statements = true
# Do not put space within parentheses of control statements
cpp_space_between_parentheses_of_control_flow_statements = false
# Do not put space before semicolon in 'for' statements
cpp_space_before_semicolon_in_for_statement = false
# Put space after semicolon in 'for' statements
cpp_space_after_semicolon_in_for_statement = true
# Put space before ':' in range-based for loop
cpp_space_before_for_colon = true
# Put space after ':' in range-based for loop
cpp_space_after_for_colon = true
# Do not put space before colon in switch case or label statement
cpp_space_before_colon_in_case = false
# Put space after colon in switch case or label statement
cpp_space_after_colon_in_case = true
# Put space around binary operator
cpp_space_around_binary_operator = true
# Put space around assignment operator
cpp_space_around_assignment_operator = true
# Do not put space around dot, '->', '.*' and '->.'
cpp_space_around_member_access_operator = false
# Do not put space within any parentheses
cpp_space_within_parentheses = false
# Do not put space before array subscript brackets
cpp_space_before_open_square_brackets = false
# Do not put space within array subscript brackets
cpp_space_between_square_brackets = false
# Do not put space before empty parentheses in function call and initialization
cpp_space_between_method_call_name_and_opening_parenthesis = false
# Do not put space within parentheses in cast expressions
cpp_space_between_typecast_parentheses = false
# Do not put space after parentheses in cast expressions
cpp_space_after_cast = false
# Do not put space within parentheses in function call and initialization
cpp_space_between_method_call_parameter_list_parentheses = false
# Do not put space within empty parentheses in function call and initialization
cpp_space_between_method_call_empty_parameter_list_parentheses = false
# Put space in ternary operator '? :' before '?'
cpp_space_before_ternary_quest = true
# Put space in ternary operator '? :' after '?'
cpp_space_after_ternary_quest = true
# Put space in ternary operator '? :' before ':'
cpp_space_before_ternary_colon = true
# Put space in ternary operator '? :' after ':'
cpp_space_after_ternary_colon = true
# Do not put space before uniform initialization braces
cpp_space_before_initializer_braces = false
# Do not put space within uniform initialization braces
cpp_space_within_initializer_braces = false
# Do not put space within empty uniform initialization braces
cpp_space_within_empty_initializer_braces = false
# Put space before end of line comment
cpp_space_before_trailing_comment = true
# Preserve spaces before end of line comment
cpp_disable_space_changes_before_trailing_comment = true

# Line breaks and Wrapping #
# Redundant override (Already overridden globally by 'insert_final_newline')
#cpp_insert_final_newline
# Redundant override (Already overridden earlier in Visual Studio section)
#cpp_new_line_before_else
# Do not place 'while' in a do-while loop on a new line
cpp_new_line_before_while = false
# Redundant override (Already overridden earlier in Visual Studio section)
#cpp_new_line_before_catch
# Do not change the line breaks of single embedded statements
cpp_simple_embedded_statement_style = do_not_change
# Do not change the line breaks of simple 'case' statement
cpp_simple_case_statement_style = do_not_change
# Put member function definition return type on same line
cpp_function_definition_return_type_style = on_single_line
# Put top-level function definition return type on same line
cpp_toplevel_function_definition_return_type_style = on_single_line
# Put member function declaration return type on same line
cpp_function_declaration_return_type_style = on_single_line
# Put top-level function declaration return type on same line
cpp_toplevel_function_declaration_return_type_style = on_single_line
# Force template<...> of a template declaration on new line
cpp_break_template_declaration = line_break
# No override for "Break line before the requires-clause" (requires-clause is a C++20 feature)
#cpp_line_break_before_requires_clause
# Do not change the line break before the colon in member initializer lists
cpp_member_initializer_list_style = do_not_change
# Do not change the line break after the colon in member initializer lists
cpp_line_break_after_colon_in_member_initializer_lists = do_not_change
# No override for "Break line before comma in member initializer lists" (Varies throughout source code)
#cpp_line_break_before_comma_in_member_initializer_lists
# No override for "Break line after comma in member initializer lists" (Varies throughout source code)
#cpp_line_break_after_comma_in_member_initializer_lists
# No override for "Allow C++/CLI generic constraints on the same line" (C++/CLI is not used)
#cpp_place_type_constraints_on_same_line
# No override for "Keep existing line breaks" (Varies throughout source code; depends on developer preference)
#cpp_keep_user_linebreaks
# No override for "Hard wrap at _ characters" (Unspecified)
#cpp_max_line_length
# Do not prefer wrap before ','
cpp_wrap_before_comma = false
# Do not prefer wrap before ',' in base clause
cpp_wrap_before_comma_in_base_clause = false
# No override for "Wrap ternary expression" (Varies throughout source code)
#cpp_wrap_ternary_expr_style
# No override for "Prefer wrap before '?' and ':' in ternary expressions" (Varies throughout source code)
#cpp_wrap_before_ternary_opsigns
# No override for "Prefer wrap before ':'" (Varies throughout source code)
#cpp_wrap_before_colon
# No override for "Prefer wrap before first C++/CLI generic constraint" (C++/CLI is not used)
#cpp_wrap_before_first_type_parameter_constraint
# No override for "Wrap multiple C++/CLI generic constraints" (C++/CLI is not used)
#cpp_wrap_multiple_type_parameter_constraints_style
# No override for "Wrap enum definition" (Varies throughout source code)
#cpp_wrap_enumeration_style
# No override for "Wrap braced initializer list" (Varies throughout source code)
#cpp_wrap_braced_init_list_style
# No override for "Wrap base classes list" (Varies throughout source code)
#cpp_wrap_base_clause_style
# No override for "Wrap constructor initializer" (Varies throughout source code)
#cpp_wrap_ctor_initializer_style
# No override for "Wrap formal parameters" (Varies throughout source code)
#cpp_wrap_parameters_style
# Do not prefer wrap before '(' in declaration
cpp_wrap_before_declaration_lpar = false
# Prefer wrap after '(' in declaration
cpp_wrap_after_declaration_lpar = true
# Do not prefer wrap before ')' in declaration
cpp_wrap_before_declaration_rpar = false
# No override for "Wrap invocation arguments" (Varies throughout source code)
#cpp_wrap_arguments_style
# Do not prefer wrap before '(' in invocation
cpp_wrap_before_invocation_lpar = false
# Prefer wrap after '(' in invocation
cpp_wrap_after_invocation_lpar = true
# Do not prefer wrap before ')' in invocation
cpp_wrap_before_invocation_rpar = false
# Prefer wrap after '{' in initializer lists
cpp_wrap_after_expression_lbrace = true
# Do not prefer wrap before '}' in initializer lists
cpp_wrap_before_expression_rbrace = false
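
The C++ settings above all describe one concrete layout. As a quick illustration, here is a small self-contained snippet (not taken from the OpenTTD source) formatted the way these settings ask for: tab indentation, the function's opening brace on its own line, control-flow braces on the same line with a space before them, a space after `for`/`if`, no space before call parentheses, and the pointer symbol aligned to the right of the type.

```cpp
#include <cstdio>

/* Sum the positive values in an array; layout follows the .editorconfig rules above. */
static int SumPositive(const int *values, int count)
{
	int total = 0;
	for (int i = 0; i < count; i++) {
		if (values[i] > 0) {
			total += values[i];
		}
	}
	return total;
}

int main()
{
	int numbers[] = {3, -1, 4, -1, 5};
	printf("%d\n", SumPositive(numbers, 5));
	return 0;
}
```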

7  .github/ISSUE_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,7 @@
## Version of OpenTTD

## Expected result

## Actual result

## Steps to reproduce

41  .github/ISSUE_TEMPLATE/bug.yaml  vendored
@@ -1,41 +0,0 @@
name: Bug Report
description: Found a bug in OpenTTD?
title: "[Bug]: "
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: version
    attributes:
      label: Version of OpenTTD
      description: Fill in below what version of OpenTTD you are using, including your OS.
      placeholder: ex. 1.11.2, Windows 10
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected result
      description: Describe in a few words what you expected to happen.
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual result
      description: Describe in a few words what actually happens.
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: As detailed as possible, please tell us how we can reproduce this. Feel free to attach a savegame (zip it first) to make it more clear.
      placeholder: |
        1. Loaded the attached savegame.
        2. Click on the button left of that other icon.
        3. The window doesn't open.
    validations:
      required: true

5  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Suggestions and ideas?
    url: https://www.tt-forums.net/viewforum.php?f=32
    about: Have a suggestion or an idea for a cool new feature? Post them on our forum!

37  .github/ISSUE_TEMPLATE/crash.yaml  vendored
@@ -1,37 +0,0 @@
name: Crash
description: Did OpenTTD crash?
title: "[Crash]: "
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this crash report!
  - type: input
    id: version
    attributes:
      label: Version of OpenTTD
      description: Fill in below what version of OpenTTD you are using, including your OS.
      placeholder: ex. 1.11.2, Windows 10
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: Please spend a few words on how you can reproduce this problem.
      placeholder: |
        1. Bought a new train.
        2. The game crashed.
    validations:
      required: true
  - type: textarea
    id: crashlogs
    attributes:
      label: Upload crash files
      description: With the `crash.log`, `crash.dmp`, and `crash.sav` we can analyze the crash in detail; providing them makes it much easier for us to triage and fix the problem.
      placeholder: |
        1. Zip the `crash.log`, `crash.dmp` and `crash.sav`.
        2. Click on this field.
        3. Drag and drop the zip file in here.
    validations:
      required: true

49  .github/PULL_REQUEST_TEMPLATE.md  vendored
@@ -1,49 +0,0 @@
## Motivation / Problem

<!--
Describe here shortly
* For bug fixes:
  * What problem does this solve?
  * If there is already an issue, link the issue, otherwise describe the problem here.
* For features or gameplay changes:
  * What was the motivation to develop this feature?
  * Does this address any problem with the gameplay or interface?
  * Which group of players do you think would enjoy this feature?
-->


## Description

<!--
Describe here shortly
* For bug fixes:
  * How is the problem solved?
* For features or gameplay changes:
  * What does this feature do?
  * How does it improve/solve the situation described under 'motivation'.
-->


## Limitations

<!--
Describe here
* Is the problem solved in all scenarios?
* Is this feature complete? Are there things that could be added in the future?
* Are there things that are intentionally left out?
* Do you know of a bug or corner case that does not work?
-->


## Checklist for review

Some things are not automated, and are often forgotten. This list is a reminder for the reviewers.
* The bug fix is important enough to be backported? (label: 'backport requested')
* This PR touches english.txt or translations? Check the [guidelines](https://github.com/OpenTTD/OpenTTD/blob/master/docs/eints.md)
* This PR affects the save game format? (label 'savegame upgrade')
* This PR affects the GS/AI API? (label 'needs review: Script API')
  * ai_changelog.hpp, game_changelog.hpp need updating.
  * The compatibility wrappers (compat_*.nut) need updating.
* This PR affects the NewGRF API? (label 'needs review: NewGRF')
  * newgrf_debug_data.h may need updating.
  * [PR must be added to API tracker](https://wiki.openttd.org/en/Development/NewGRF/Specification%20Status)

12  .github/codeql/codeql-config.yml  vendored
@@ -1,12 +0,0 @@
name: openttd
queries:
  - uses: security-and-quality
query-filters:
  - exclude:
      id:
        # Only feasible way is to move away from fopen; fopen_s is optional C11 and not implemented on most platforms.
        - cpp/world-writable-file-creation
        # Basically OpenTTD's coding style for adding things like ..._INVALID to enumerations
        - cpp/irregular-enum-init
        # Our GUI code tends to use switches for OnClick handlers, DrawWidget, and UpdateWidgetSize. Sometimes GUIs just don't have many elements, but we want to keep consistency.
        - cpp/trivial-switch
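
The `cpp/irregular-enum-init` exclusion above refers to the house style of giving an enumeration an explicitly valued sentinel such as `..._INVALID` next to otherwise consecutively numbered members. A minimal, self-contained illustration with made-up names (not an actual OpenTTD enum):

```cpp
#include <cstdint>

/* Hypothetical cargo enum: consecutive members plus an out-of-sequence INVALID
 * sentinel, which is the kind of pattern cpp/irregular-enum-init would flag. */
enum class ExampleCargoType : std::uint8_t {
	COAL,
	MAIL,
	OIL,

	INVALID = 0xFF, /* sentinel value outside the regular sequence */
};

int main()
{
	ExampleCargoType cargo = ExampleCargoType::INVALID;
	return cargo == ExampleCargoType::INVALID ? 0 : 1;
}
```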

71  .github/script-missing-mode-enforcement.py  vendored
@@ -1,71 +0,0 @@
"""
Script to scan OpenTTD's script API for functions that miss checks for the
function being called from the right mode (deity or company mode).

When a function calls either ScriptObject::Command or ScriptObject::GetCompany
then the function is considered dangerous. When one of the mode enforcement
macros from script_error.hpp, i.e. EnforceDeityMode, EnforceCompanyModeValid or
EnforceDeityOrCompanyModeValid, are called in the function, then we consider
that the function has mode enforcement.

Any dangerous function for which no enforcement is found is emitted as an error.
"""

import glob
import re
import sys


def check_mode_enforcement(path):
    errors = []
    with open(path, "r") as reader:
        mode_enforcement_found = False
        dangerous_function = False
        for line in reader:
            # Line does not start with a tab and has <word>::<word>. That looks like the beginning of a function, so reset the state.
            if re.match(r"^[^\t].*\w::\w", line):
                mode_enforcement_found = False
                dangerous_function = False
                currentFunction = line
                continue

            if re.match(
                r"\t(EnforceDeityMode|EnforceCompanyModeValid|EnforceCompanyModeValid_Void|EnforceDeityOrCompanyModeValid|EnforceDeityOrCompanyModeValid_Void)\(",
                line,
            ):
                # Mode enforcement macro found
                mode_enforcement_found = True
                continue

            if re.match(r".*(ScriptObject::Command|ScriptObject::GetCompany).*", line):
                # Dangerous function found
                dangerous_function = True
                continue

            # Line with only a closing bracket. That looks like the end of a function, so check for the dangerous function without mode enforcement
            if re.match(r"^}$", line) and dangerous_function and not mode_enforcement_found:
                function_name = currentFunction.rstrip("\n").replace("/* static */ ", "")
                errors.append(f"{path}: {function_name}")

    return errors


def main():
    errors = []
    for path in sorted(glob.glob("src/script/api/*.cpp")):
        # Skip a number of files that yield only false positives
        if path.endswith(("script_object.cpp", "script_companymode.cpp", "script_controller.cpp", "script_game.cpp")):
            continue

        errors.extend(check_mode_enforcement(path))

    if errors:
        print("Mode enforcement was expected in the following files/functions:")
        print("\n".join(errors))
        sys.exit(1)

    print("OK")


if __name__ == "__main__":
    main()
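
For reference, the textual pattern this script matches looks roughly like the sketch below. Everything in it is a stand-in so the snippet compiles on its own: the macro, `CompanyModeIsValid()`, the `Command` helper and the class are invented here; only the identifier names `EnforceCompanyModeValid` and `ScriptObject::Command` are taken from the docstring above, and the real OpenTTD declarations differ.

```cpp
#include <iostream>

/* Stand-ins so this sketch is self-contained; these are NOT the OpenTTD declarations. */
static bool CompanyModeIsValid() { return true; }
#define EnforceCompanyModeValid(ret) if (!CompanyModeIsValid()) return ret

struct ScriptObject {
	struct Command { static bool Do() { return true; } }; /* stand-in for ScriptObject::Command */
};

struct ScriptExample {
	static bool DoSomething();
};

/* A "dangerous" function: its body mentions ScriptObject::Command, so the script
 * expects a tab-indented Enforce...() call before the closing brace in column 0. */
/* static */ bool ScriptExample::DoSomething()
{
	EnforceCompanyModeValid(false);     /* the mode-enforcement line the script looks for */
	return ScriptObject::Command::Do(); /* the call that marks the function as dangerous */
}

int main()
{
	std::cout << (ScriptExample::DoSomething() ? "OK" : "blocked") << std::endl;
	return 0;
}
```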

34  .github/stale.yml  vendored  Normal file
@@ -0,0 +1,34 @@
daysUntilClose: 7
staleLabel: stale
closeComment: false
exemptMilestones: true
exemptAssignees: true

issues:
  daysUntilStale: 60
  exemptLabels:
    - pinned
    - security
    - "good first issue"
    - regression
  markComment: >
    This issue has been automatically marked as stale because it has not had any activity in the last two months.

    If you believe the issue is still relevant, please test on the latest nightly and report back.

    It will be closed if no further activity occurs within 7 days.

    Thank you for your contributions.

pulls:
  daysUntilStale: 30
  exemptLabels:
    - pinned
  markComment: >
    This pull request has been automatically marked as stale because it has not had any activity in the last month.

    Please feel free to give a status update now, ping for review, or re-open when it's ready.

    It will be closed if no further activity occurs within 7 days.

    Thank you for your contributions.

220  .github/unused-strings.py  vendored
@@ -1,220 +0,0 @@
"""
Script to scan the OpenTTD source-tree for STR_ entries that are defined but
no longer used.

This is not completely trivial, as OpenTTD references a lot of strings in
relation to another string. The most obvious example of this is a list. OpenTTD
only references the first entry in the list, and does "+ <var>" to get to the
correct string.

There are other ways in which OpenTTD uses relative values. This script tries to
account for all of them, to give the best approximation we have for "this
string is unused".
"""

import glob
import os
import re
import subprocess
import sys

from enum import Enum

LENGTH_NAME_LOOKUP = {
    "VEHICLE_TYPES": 4,
}


class SkipType(Enum):
    NONE = 1
    LENGTH = 2
    EXTERNAL = 3
    ZERO_IS_SPECIAL = 4
    EXPECT_NEWLINE = 5


def read_language_file(filename, strings_found, errors):
    strings_defined = []

    skip = SkipType.NONE
    length = 0
    common_prefix = ""
    last_tiny_string = ""

    with open(filename) as fp:
        for line in fp.readlines():
            if not line.strip():
                if skip == SkipType.EXPECT_NEWLINE:
                    skip = SkipType.NONE
                continue

            line = line.strip()

            if skip == SkipType.EXPECT_NEWLINE:
                # The only thing allowed after a list is this next marker, or a newline.
                if line == "###next-name-looks-similar":
                    # "###next-name-looks-similar"
                    # Indicates the common prefix of the last list has a very
                    # similar name to the next entry, but isn't part of the
                    # list. So do not emit a warning about them looking very
                    # similar.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorter than indicated by ###length")

                    common_prefix = ""
                else:
                    errors.append(f"ERROR: expected a newline after a list, but didn't find any around {name}. Did you add an entry to the list without increasing the length?")

                skip = SkipType.NONE

            if line[0] == "#":
                if line.startswith("###length "):
                    # "###length <count>"
                    # Indicates the next few entries are part of a list. Only
                    # the first entry is possibly referenced, and the rest are
                    # indirectly.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorter than indicated by ###length")

                    length = line.split(" ")[1].strip()

                    if length.isnumeric():
                        length = int(length)
                    else:
                        length = LENGTH_NAME_LOOKUP[length]

                    skip = SkipType.LENGTH
                elif line.startswith("###external "):
                    # "###external <count>"
                    # Indicates the next few entries are used outside the
                    # source and will not be referenced.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorter than indicated by ###length")

                    length = line.split(" ")[1].strip()
                    length = int(length)

                    skip = SkipType.EXTERNAL
                elif line.startswith("###setting-zero-is-special"):
                    # "###setting-zero-is-special"
                    # Indicates the next entry is part of the "zero is special"
                    # flag of settings. These entries are not referenced
                    # directly in the code.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorter than indicated by ###length")

                    skip = SkipType.ZERO_IS_SPECIAL

                continue

            name = line.split(":")[0].strip()
            strings_defined.append(name)

            # If a string ends on _TINY or _SMALL, it can be the {TINY} variant.
            # Check for this by some fuzzy matching.
            if name.endswith(("_SMALL", "_TINY")):
                last_tiny_string = name
            elif last_tiny_string:
                matching_name = "_".join(last_tiny_string.split("_")[:-1])
                if name == matching_name:
                    strings_found.add(last_tiny_string)
                else:
                    last_tiny_string = ""

            if skip == SkipType.EXTERNAL:
                strings_found.add(name)
                skip = SkipType.LENGTH

            if skip == SkipType.LENGTH:
                skip = SkipType.NONE
                length -= 1
                common_prefix = name
            elif skip == SkipType.ZERO_IS_SPECIAL:
                strings_found.add(name)
            elif length > 0:
                strings_found.add(name)
                length -= 1

                # Find the common prefix of these strings
                for i in range(len(common_prefix)):
                    if common_prefix[0 : i + 1] != name[0 : i + 1]:
                        common_prefix = common_prefix[0:i]
                        break

                if length == 0:
                    skip = SkipType.EXPECT_NEWLINE

                    if len(common_prefix) < 6:
                        errors.append(f"ERROR: common prefix of block including {name} was reduced to {common_prefix}. This means the names in the list are not consistent.")
            elif common_prefix:
                if name.startswith(common_prefix):
                    errors.append(f"ERROR: {name} looks a lot like block above with prefix {common_prefix}. This mostly means that the list length was too short. Use '###next-name-looks-similar' if it is not.")
                common_prefix = ""

    return strings_defined


def scan_source_files(path, strings_found):
    for new_path in glob.glob(f"{path}/*"):
        if os.path.isdir(new_path):
            scan_source_files(new_path, strings_found)
            continue

        if not new_path.endswith((".c", ".h", ".cpp", ".hpp", ".ini")):
            continue

        # Most files we can just open, but some use magic that requires the
        # G++ preprocessor before we can make sense of it.
        if new_path == "src/table/cargo_const.h":
            p = subprocess.run(["g++", "-E", new_path], stdout=subprocess.PIPE)
            output = p.stdout.decode()
        else:
            with open(new_path) as fp:
                output = fp.read()

        # Find all the string references.
        matches = re.findall(r"[^A-Z_](STR_[A-Z0-9_]*)", output)
        strings_found.update(matches)


def main():
    strings_found = set()
    errors = []

    scan_source_files("src", strings_found)
    strings_defined = read_language_file("src/lang/english.txt", strings_found, errors)

    # STR_LAST_STRINGID is special, and not really a string.
    strings_found.remove("STR_LAST_STRINGID")
    # These are mentioned in comments, not really a string.
    strings_found.remove("STR_XXX")
    strings_found.remove("STR_NEWS")
    strings_found.remove("STR_CONTENT_TYPE_")

    # This string is added for completion, but never used.
    strings_defined.remove("STR_JUST_DATE_SHORT")

    strings_defined = sorted(strings_defined)
    strings_found = sorted(list(strings_found))

    for string in strings_found:
        if string not in strings_defined:
            errors.append(f"ERROR: {string} found but never defined.")

    for string in strings_defined:
        if string not in strings_found:
            errors.append(f"ERROR: {string} is (possibly) no longer needed.")

    if errors:
        print("\n".join(errors))
        sys.exit(1)

    print("OK")


if __name__ == "__main__":
    main()
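
The "relative reference" pattern the docstring describes, where only the first STR_ entry of a `###length` list appears literally in the source and the rest are reached by adding an offset, looks roughly like this self-contained sketch; the string IDs and `GetString()` here are made up for illustration.

```cpp
#include <iostream>

/* Hypothetical string IDs: a list of 4 entries (compare "VEHICLE_TYPES": 4 above).
 * Only the first ID would show up in a source scan; the rest are reached via "+ offset". */
enum StringID {
	STR_EXAMPLE_VEHICLE_TRAIN,    /* referenced directly in code */
	STR_EXAMPLE_VEHICLE_ROAD,     /* only reached as STR_EXAMPLE_VEHICLE_TRAIN + 1 */
	STR_EXAMPLE_VEHICLE_SHIP,     /* ... + 2 */
	STR_EXAMPLE_VEHICLE_AIRCRAFT, /* ... + 3 */
};

static const char *GetString(StringID id)
{
	static const char *strings[] = {"Train", "Road Vehicle", "Ship", "Aircraft"};
	return strings[id];
}

int main()
{
	for (int vehicle_type = 0; vehicle_type < 4; vehicle_type++) {
		std::cout << GetString(static_cast<StringID>(STR_EXAMPLE_VEHICLE_TRAIN + vehicle_type)) << '\n';
	}
	return 0;
}
```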

50  .github/windowdesc-ini-key.py  vendored
@@ -1,50 +0,0 @@
"""
Script to scan the OpenTTD source-tree for ini_key issues in WindowDesc entries.
"""

import glob
import os
import re
import sys


def scan_source_files(path, ini_keys=None):
    if ini_keys is None:
        ini_keys = set()

    errors = []

    for new_path in glob.glob(f"{path}/*.cpp"):
        if os.path.isdir(new_path):
            errors.append(scan_source_files(new_path, ini_keys))
            continue

        with open(new_path) as fp:
            output = fp.read()

        for (name, ini_key, widgets) in re.findall(r"^static WindowDesc ([a-zA-Z0-9_]*).*?, (?:\"(.*?)\")?.*?,(?:\s+.*?,){6}\s+[^\s]+\((.*?)\)", output, re.S|re.M):
            if ini_key:
                if ini_key in ini_keys:
                    errors.append(f"{new_path}: {name} ini_key is a duplicate")
                ini_keys.add(ini_key)
            widget = re.findall("static const (?:struct )?NWidgetPart " + widgets + ".*?(?:WWT_(DEFSIZE|STICKY)BOX.*?)?;", output, re.S)[0]
            if widget and not ini_key:
                errors.append(f"{new_path}: {name} has WWT_DEFSIZEBOX/WWT_STICKYBOX without ini_key")
            if ini_key and not widget:
                errors.append(f"{new_path}: {name} has ini_key without WWT_DEFSIZEBOX/WWT_STICKYBOX")

    return errors


def main():
    errors = scan_source_files("src")

    if errors:
        print("\n".join(errors))
        sys.exit(1)

    print("OK")


if __name__ == "__main__":
    main()
451
.github/workflows/ci-build.yml
vendored
451
.github/workflows/ci-build.yml
vendored
@@ -1,451 +0,0 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
CTEST_OUTPUT_ON_FAILURE: 1
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
|
||||
|
||||
jobs:
|
||||
emscripten:
|
||||
name: Emscripten
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container:
|
||||
# If you change this version, change the number in the cache step too.
|
||||
image: emscripten/emsdk:3.1.42
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /emsdk/upstream/emscripten/cache
|
||||
key: 3.1.42-${{ runner.os }}
|
||||
|
||||
- name: Patch Emscripten to support LZMA and nlohmann-json
|
||||
run: |
|
||||
cd /emsdk/upstream/emscripten
|
||||
patch -p1 < ${GITHUB_WORKSPACE}/os/emscripten/emsdk-liblzma.patch
|
||||
patch -p1 < ${GITHUB_WORKSPACE}/os/emscripten/emsdk-nlohmann-json.patch
|
||||
|
||||
- name: Build (host tools)
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. -DOPTION_TOOLS_ONLY=ON
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc) --target tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
emcmake cmake .. -DHOST_BINARY_DIR=../build-host
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc) --target openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
linux:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- name: Clang
|
||||
compiler: clang
|
||||
cxxcompiler: clang++
|
||||
libraries: libsdl2-dev
|
||||
- name: GCC - SDL2
|
||||
compiler: gcc
|
||||
cxxcompiler: g++
|
||||
libraries: libsdl2-dev
|
||||
- name: GCC - SDL1.2
|
||||
compiler: gcc
|
||||
cxxcompiler: g++
|
||||
libraries: libsdl1.2-dev
|
||||
- name: GCC - Dedicated
|
||||
compiler: gcc
|
||||
cxxcompiler: g++
|
||||
extra-cmake-parameters: -DOPTION_DEDICATED=ON -DCMAKE_CXX_FLAGS_INIT="-DRANDOM_DEBUG" -DCMAKE_DISABLE_PRECOMPILE_HEADERS=ON
|
||||
# Compile without SDL / SDL2, as that should compile fine too.
|
||||
|
||||
name: Linux (${{ matrix.name }})
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
env:
|
||||
CC: ${{ matrix.compiler }}
|
||||
CXX: ${{ matrix.cxxcompiler }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Update apt"
|
||||
sudo apt-get update
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install dependencies"
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
liballegro4-dev \
|
||||
libcurl4-openssl-dev \
|
||||
libfontconfig-dev \
|
||||
libharfbuzz-dev \
|
||||
libicu-dev \
|
||||
liblzma-dev \
|
||||
liblzo2-dev \
|
||||
nlohmann-json3-dev \
|
||||
${{ matrix.libraries }} \
|
||||
zlib1g-dev \
|
||||
# EOF
|
||||
|
||||
sudo vcpkg install \
|
||||
breakpad \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Get OpenGFX
|
||||
run: |
|
||||
mkdir -p ~/.local/share/openttd/baseset
|
||||
cd ~/.local/share/openttd/baseset
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. -DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake ${{ matrix.extra-cmake-parameters }}
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd build
|
||||
ctest -j $(nproc) --timeout 120
|
||||
|
||||
macos:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: x64
|
||||
full_arch: x86_64
|
||||
|
||||
name: Mac OS (${{ matrix.arch }})
|
||||
|
||||
runs-on: macos-latest
|
||||
env:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.13
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install dependencies
|
||||
env:
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
pkg-config \
|
||||
# EOF
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
run: |
|
||||
echo "image=$ImageOS-$ImageVersion" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /usr/local/share/vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}-1 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}
|
||||
|
||||
- name: Prepare vcpkg
|
||||
run: |
|
||||
vcpkg install --triplet=${{ matrix.arch }}-osx \
|
||||
breakpad \
|
||||
curl \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
nlohmann-json \
|
||||
zlib \
|
||||
# EOF
|
||||
|
||||
- name: Install OpenGFX
|
||||
run: |
|
||||
mkdir -p ~/Documents/OpenTTD/baseset
|
||||
cd ~/Documents/OpenTTD/baseset
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. \
|
||||
-DCMAKE_OSX_ARCHITECTURES=${{ matrix.full_arch }} \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-osx \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd build
|
||||
ctest -j $(sysctl -n hw.logicalcpu) --timeout 120
|
||||
|
||||
windows:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [windows-latest, windows-2019]
|
||||
arch: [x86, x64]
|
||||
|
||||
name: Windows (${{ matrix.os }} / ${{ matrix.arch }})
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
shell: powershell
|
||||
run: |
|
||||
# Work around caching failure with GNU tar
|
||||
New-Item -Type Junction -Path vcpkg -Target c:\vcpkg
|
||||
|
||||
Write-Output "image=$env:ImageOS-$env:ImageVersion" >> $env:GITHUB_OUTPUT
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}-1 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}
|
||||
|
||||
- name: Prepare vcpkg
|
||||
shell: bash
|
||||
run: |
|
||||
vcpkg install --triplet=${{ matrix.arch }}-windows-static \
|
||||
breakpad \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
nlohmann-json \
|
||||
zlib \
|
||||
# EOF
|
||||
|
||||
- name: Install OpenGFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
cd "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install MSVC problem matcher
|
||||
uses: ammaraskar/msvc-problem-matcher@master
|
||||
|
||||
- name: Configure developer command prompt for ${{ matrix.arch }}
|
||||
uses: ilammy/msvc-dev-cmd@v1
|
||||
with:
|
||||
arch: ${{ matrix.arch }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
env:
|
||||
NINJA_STATUS: "[%f/%t -- %e] " # [finished_edges/total_edges -- elapsed_time], default value is "[%f/%t] "
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. \
|
||||
-GNinja \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-windows-static \
|
||||
-DCMAKE_TOOLCHAIN_FILE="c:\vcpkg\scripts\buildsystems\vcpkg.cmake" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build .
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
cd build
|
||||
ctest --timeout 120
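As a hedged sketch only, a local approximation of this Windows job would look roughly like the following, run from a Visual Studio developer prompt with vcpkg at c:\vcpkg (both assumptions, mirroring the steps above for the x64 matrix entry).
    # Not part of the workflow; a local approximation of the Windows job.
    vcpkg install --triplet=x64-windows-static liblzma libpng lzo nlohmann-json zlib
    mkdir build && cd build
    cmake .. -GNinja \
      -DVCPKG_TARGET_TRIPLET=x64-windows-static \
      -DCMAKE_TOOLCHAIN_FILE="c:/vcpkg/scripts/buildsystems/vcpkg.cmake"
    cmake --build .
    ctest --timeout 120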
|
||||
|
||||
|
||||
msys2:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- msystem: MINGW64
|
||||
arch: x86_64
|
||||
- msystem: MINGW32
|
||||
arch: i686
|
||||
|
||||
name: MinGW (${{ matrix.arch }})
|
||||
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup MSYS2
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: ${{ matrix.msystem }}
|
||||
release: false
|
||||
install: >-
|
||||
git
|
||||
make
|
||||
mingw-w64-${{ matrix.arch }}-cmake
|
||||
mingw-w64-${{ matrix.arch }}-gcc
|
||||
mingw-w64-${{ matrix.arch }}-lzo2
|
||||
mingw-w64-${{ matrix.arch }}-libpng
|
||||
mingw-w64-${{ matrix.arch }}-lld
|
||||
mingw-w64-${{ matrix.arch }}-ninja
|
||||
mingw-w64-${{ matrix.arch }}-nlohmann-json
|
||||
|
||||
- name: Install OpenGFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
cd "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
shell: msys2 {0}
|
||||
env:
|
||||
NINJA_STATUS: "[%f/%t -- %e] " # [finished_edges/total_edges -- elapsed_time], default value is "[%f/%t] "
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. \
|
||||
-GNinja \
|
||||
-DCMAKE_CXX_FLAGS="-fuse-ld=lld" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build .
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
shell: msys2 {0}
|
||||
run: |
|
||||
cd build
|
||||
ctest --timeout 120
|
||||
|
||||
check_annotations:
|
||||
name: Check Annotations
|
||||
needs:
|
||||
- emscripten
|
||||
- linux
|
||||
- macos
|
||||
- windows
|
||||
- msys2
|
||||
|
||||
if: always() && github.event_name == 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Check annotations
|
||||
uses: OpenTTD/actions/annotation-check@v3
|
86
.github/workflows/codeql.yml
vendored
@@ -1,86 +0,0 @@
|
||||
name: CodeQL
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Update apt"
|
||||
sudo apt-get update
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install dependencies"
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
liballegro4-dev \
|
||||
libcurl4-openssl-dev \
|
||||
libfontconfig-dev \
|
||||
libharfbuzz-dev \
|
||||
libicu-dev \
|
||||
liblzma-dev \
|
||||
liblzo2-dev \
|
||||
libsdl2-dev \
|
||||
nlohmann-json3-dev \
|
||||
zlib1g-dev \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Set number of make jobs
|
||||
run: |
|
||||
echo "MAKEFLAGS=-j$(nproc)" >> $GITHUB_ENV
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: cpp
|
||||
config-file: ./.github/codeql/codeql-config.yml
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: /language:cpp
|
||||
upload: False
|
||||
output: sarif-results
|
||||
|
||||
- name: Filter out table & generated code
|
||||
uses: advanced-security/filter-sarif@v1
|
||||
with:
|
||||
patterns: |
|
||||
+**/*.*
|
||||
-**/table/*.*
|
||||
-**/generated/**/*.*
|
||||
-**/tests/*.*
|
||||
input: sarif-results/cpp.sarif
|
||||
output: sarif-results/cpp.sarif
|
||||
|
||||
- name: Upload results
|
||||
uses: github/codeql-action/upload-sarif@v2
|
||||
with:
|
||||
sarif_file: sarif-results/cpp.sarif
|
34
.github/workflows/commit-checker.yml
vendored
@@ -3,10 +3,6 @@ name: Commit checker
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
|
||||
|
||||
jobs:
|
||||
commit-checker:
|
||||
name: Commit checker
|
||||
@@ -14,12 +10,36 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 4
|
||||
|
||||
- name: Get pull-request commits
|
||||
uses: OpenTTD/actions/checkout-pull-request@v2
|
||||
run: |
|
||||
set -x
|
||||
# actions/checkout did a merge checkout of the pull-request. As such, the first
# commit is the merge commit. This means that HEAD^ is the base branch, and
# HEAD^2 holds the commits from the pull-request. We now check whether those trees
# have a common parent. If not, we fetch a few more commits until we do. As a result,
# the log between HEAD^ and HEAD^2 will be the commits in the pull-request.
|
||||
DEPTH=4
|
||||
while [ -z "$(git merge-base HEAD^ HEAD^2)" ]; do
|
||||
git -c protocol.version=2 fetch --no-tags --prune --progress --no-recurse-submodules --deepen=${DEPTH} origin HEAD
|
||||
DEPTH=$(( ${DEPTH} * 4 ))
|
||||
done
|
||||
|
||||
# Just to show which commits we are going to evaluate.
|
||||
git log --oneline HEAD^..HEAD^2
|
||||
|
||||
- name: Checkout commit-checker
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: OpenTTD/OpenTTD-git-hooks
|
||||
path: git-hooks
|
||||
ref: master
|
||||
|
||||
- name: Check commits
|
||||
uses: OpenTTD/OpenTTD-git-hooks@main
|
||||
run: |
|
||||
set -x
|
||||
HOOKS_DIR=./git-hooks/hooks GIT_DIR=.git ./git-hooks/hooks/check-commits.sh HEAD^..HEAD^2
|
||||
echo "Commit checks passed"
|
||||
|
104
.github/workflows/preview-build.yml
vendored
@@ -1,104 +0,0 @@
|
||||
name: Preview build
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
PREVIEW_CLOUDFLARE_API_TOKEN:
|
||||
description: API token to upload a preview to Cloudflare Pages
|
||||
required: true
|
||||
PREVIEW_CLOUDFLARE_ACCOUNT_ID:
|
||||
description: Account ID to upload a preview to Cloudflare Pages
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
preview:
|
||||
name: Build preview
|
||||
|
||||
environment:
|
||||
name: preview
|
||||
url: https://preview.openttd.org/pr${{ github.event.pull_request.number }}/
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# If you change this version, change the number in the cache step too.
|
||||
image: emscripten/emsdk:3.1.42
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Name branch
|
||||
run: |
|
||||
git config --global --add safe.directory ${GITHUB_WORKSPACE}
|
||||
git checkout -b pr${{ github.event.pull_request.number }}
|
||||
|
||||
- name: Setup cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /emsdk/upstream/emscripten/cache
|
||||
key: 3.1.42-${{ runner.os }}
|
||||
|
||||
- name: Patch Emscripten to support LZMA and nlohmann_json
|
||||
run: |
|
||||
cd /emsdk/upstream/emscripten
|
||||
patch -p1 < ${GITHUB_WORKSPACE}/os/emscripten/emsdk-liblzma.patch
|
||||
patch -p1 < ${GITHUB_WORKSPACE}/os/emscripten/emsdk-nlohmann-json.patch
|
||||
|
||||
- name: Build (host tools)
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. -DOPTION_TOOLS_ONLY=ON
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
make -j$(nproc) tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
emcmake cmake .. \
|
||||
-DHOST_BINARY_DIR=../build-host \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc) --target openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Prepare preview
|
||||
run: |
|
||||
mkdir public
|
||||
|
||||
cp build/openttd.data public/
|
||||
cp build/openttd.html public/
|
||||
cp build/openttd.js public/
|
||||
cp build/openttd.wasm public/
|
||||
|
||||
# Ensure we use the latest version of npm; the one we get with current
|
||||
# emscripten doesn't allow running "npx wrangler" as root.
|
||||
# Current emscripten can't install npm>=10.0.0 because node is too old.
|
||||
npm install -g npm@9
|
||||
|
||||
- name: Publish preview
|
||||
uses: cloudflare/pages-action@v1
|
||||
with:
|
||||
apiToken: ${{ secrets.PREVIEW_CLOUDFLARE_API_TOKEN }}
|
||||
accountId: ${{ secrets.PREVIEW_CLOUDFLARE_ACCOUNT_ID }}
|
||||
projectName: ${{ vars.PREVIEW_CLOUDFLARE_PROJECT_NAME }}
|
||||
directory: public
|
||||
branch: pr${{ github.event.pull_request.number }}
|
20
.github/workflows/preview.yml
vendored
@@ -1,20 +0,0 @@
|
||||
name: Preview
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
preview:
|
||||
if: ${{ (github.event.action == 'labeled' && github.event.label.name == 'preview') || (github.event.action != 'labeled' && contains(github.event.pull_request.labels.*.name, 'preview')) }}
|
||||
name: Preview
|
||||
uses: ./.github/workflows/preview-build.yml
|
||||
secrets: inherit
|
85
.github/workflows/release-docs.yml
vendored
@@ -1,85 +0,0 @@
|
||||
name: Release (Docs)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
docs:
|
||||
name: Docs
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Update apt"
|
||||
sudo apt-get update
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install dependencies"
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
doxygen \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir -p ${GITHUB_WORKSPACE}/build
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_DOCS_ONLY=ON \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build . --target docs
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
BASENAME=openttd-${{ inputs.version }}
|
||||
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
|
||||
mv docs/source ${BASENAME}-docs
|
||||
mv docs/ai-api ${BASENAME}-docs-ai
|
||||
mv docs/gs-api ${BASENAME}-docs-gs
|
||||
|
||||
mkdir -p bundles
|
||||
|
||||
echo "::group::Create docs bundle"
|
||||
tar --xz -cf bundles/${BASENAME}-docs.tar.xz ${BASENAME}-docs
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Create AI API docs bundle"
|
||||
tar --xz -cf bundles/${BASENAME}-docs-ai.tar.xz ${BASENAME}-docs-ai
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Create GameScript API docs bundle"
|
||||
tar --xz -cf bundles/${BASENAME}-docs-gs.tar.xz ${BASENAME}-docs-gs
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openttd-docs
|
||||
path: build/bundles/*.tar.xz
|
||||
retention-days: 5
|
196
.github/workflows/release-linux.yml
vendored
@@ -1,196 +0,0 @@
|
||||
name: Release (Linux)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
survey_key:
|
||||
required: false
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
name: Linux (Generic)
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container:
|
||||
# manylinux2014 is based on CentOS 7, but already has a lot of things
|
||||
# installed and preconfigured. It makes it easier to build OpenTTD.
|
||||
image: quay.io/pypa/manylinux2014_x86_64
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
# dtolnay/rust-toolchain uses a curl argument (--retry-connrefused) that the curl shipped with our container doesn't support.
|
||||
# So we need to do one part ourselves; the action takes over the rest and prepares the setup further.
|
||||
- name: Prepare Rust toolchain
|
||||
run: |
|
||||
curl --proto '=https' --tlsv1.2 --retry 10 --location --silent --show-error --fail "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
|
||||
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Enable Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /vcpkg/installed
|
||||
key: ubuntu-20.04-vcpkg-release-1 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
ubuntu-20.04-vcpkg-release
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Install system dependencies"
|
||||
# perl-IPC-Cmd, wget, and zip are needed to run vcpkg.
|
||||
# autoconf-archive is needed to build ICU.
|
||||
yum install -y \
|
||||
autoconf-archive \
|
||||
perl-IPC-Cmd \
|
||||
wget \
|
||||
zip \
|
||||
# EOF
|
||||
|
||||
# aclocal looks first in /usr/local/share/aclocal, and if that doesn't
|
||||
# exist only looks in /usr/share/aclocal. We have files in both that
|
||||
# are important. So copy the latter to the first, and we are good to
|
||||
# go.
|
||||
cp /usr/share/aclocal/* /usr/local/share/aclocal/
|
||||
echo "::endgroup::"
|
||||
|
||||
# The yum variant of fluidsynth depends on all possible audio drivers,
|
||||
# like jack, ALSA, pulseaudio, etc. This is not really useful for us,
|
||||
# as we route the output of fluidsynth back via our sound driver, and
|
||||
# as such do not use these audio driver outputs at all.
|
||||
# The vcpkg variant of fluidsynth depends on ALSA. Similar issue here.
|
||||
# So instead, we compile fluidsynth ourselves, with as few
|
||||
# dependencies as possible. We do it before anything else is installed,
|
||||
# to make sure it doesn't pick up on any of the drivers.
|
||||
echo "::group::Install fluidsynth"
|
||||
wget https://github.com/FluidSynth/fluidsynth/archive/v2.3.3.tar.gz
|
||||
tar xf v2.3.3.tar.gz
|
||||
(
|
||||
cd fluidsynth-2.3.3
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=/usr
|
||||
cmake --build . -j $(nproc)
|
||||
cmake --install .
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install audio drivers"
|
||||
# These audio libs are to make sure the SDL version of vcpkg adds
|
||||
# sound-support; these libraries are not added to the resulting
|
||||
# binary, but the headers are used to enable them in SDL.
|
||||
yum install -y \
|
||||
alsa-lib-devel \
|
||||
jack-audio-connection-kit-devel \
|
||||
pulseaudio-libs-devel \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
# We use vcpkg for our dependencies, to get more up-to-date versions.
|
||||
echo "::group::Install vcpkg and dependencies"
|
||||
|
||||
# We do a little dance to make sure we copy the cached install folder
|
||||
# into our new clone.
|
||||
git clone --depth=1 https://github.com/microsoft/vcpkg /vcpkg-clone
|
||||
if [ -e /vcpkg/installed ]; then
|
||||
mv /vcpkg/installed /vcpkg-clone/
|
||||
rm -rf /vcpkg
|
||||
fi
|
||||
mv /vcpkg-clone /vcpkg
|
||||
|
||||
(
|
||||
cd /vcpkg
|
||||
./bootstrap-vcpkg.sh -disableMetrics
|
||||
|
||||
# Make Python3 available for other packages.
|
||||
./vcpkg install python3
|
||||
ln -sf $(pwd)/installed/x64-linux/tools/python3/python3.[0-9][0-9] /usr/bin/python3
|
||||
|
||||
./vcpkg install \
|
||||
breakpad \
|
||||
curl[http2] \
|
||||
fontconfig \
|
||||
freetype \
|
||||
harfbuzz \
|
||||
icu \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
nlohmann-json \
|
||||
sdl2 \
|
||||
zlib \
|
||||
# EOF
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install breakpad dependencies"
|
||||
cargo install dump_syms
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir -p build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_SURVEY_KEY=${{ inputs.survey_key }} \
|
||||
-DOPTION_PACKAGE_DEPENDENCIES=ON \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc) --target openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create breakpad symbols
|
||||
run: |
|
||||
cd build
|
||||
dump_syms ./openttd --inlines --store symbols
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourselves at
|
||||
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openttd-linux-generic
|
||||
path: build/bundles
|
||||
retention-days: 5
|
||||
|
||||
- name: Store symbols
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: symbols-linux-generic
|
||||
path: build/symbols
|
||||
retention-days: 5
|
242
.github/workflows/release-macos.yml
vendored
@@ -1,242 +0,0 @@
|
||||
name: Release (MacOS)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
survey_key:
|
||||
required: false
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
jobs:
|
||||
macos:
|
||||
name: MacOS
|
||||
|
||||
runs-on: macos-11
|
||||
env:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.13
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Enable Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Install dependencies
|
||||
env:
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
echo "::group::Install brew dependencies"
|
||||
brew install \
|
||||
pandoc \
|
||||
pkg-config \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install breakpad dependencies"
|
||||
cargo install dump_syms
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
run: |
|
||||
echo "image=$ImageOS-$ImageVersion" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /usr/local/share/vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-release-1 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-release
|
||||
${{ steps.key.outputs.image }}-vcpkg-x64
|
||||
|
||||
- name: Prepare vcpkg
|
||||
run: |
|
||||
vcpkg install \
|
||||
breakpad:x64-osx \
|
||||
breakpad:arm64-osx \
|
||||
curl:x64-osx \
|
||||
curl:arm64-osx \
|
||||
liblzma:x64-osx \
|
||||
liblzma:arm64-osx \
|
||||
libpng:x64-osx \
|
||||
libpng:arm64-osx \
|
||||
lzo:x64-osx \
|
||||
lzo:arm64-osx \
|
||||
nlohmann-json:x64-osx \
|
||||
nlohmann-json:arm64-osx \
|
||||
zlib:x64-osx \
|
||||
zlib:arm64-osx \
|
||||
# EOF
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build tools
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_TOOLS_ONLY=ON \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build tools"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu) --target tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Import code signing certificates
|
||||
uses: Apple-Actions/import-codesign-certs@v2
|
||||
with:
|
||||
# The certificates in a PKCS12 file encoded as a base64 string
|
||||
p12-file-base64: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_P12_BASE64 }}
|
||||
# The password used to import the PKCS12 file.
|
||||
p12-password: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_PASSWORD }}
|
||||
# If this is run on a fork, there may not be a certificate set up - continue in this case
|
||||
continue-on-error: true
|
||||
|
||||
- name: Build arm64
|
||||
run: |
|
||||
mkdir build-arm64
|
||||
cd build-arm64
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_OSX_ARCHITECTURES=arm64 \
|
||||
-DVCPKG_TARGET_TRIPLET=arm64-osx \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_SURVEY_KEY=${{ inputs.survey_key }} \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu) --target openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Build x64
|
||||
run: |
|
||||
mkdir build-x64
|
||||
cd build-x64
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_OSX_ARCHITECTURES=x86_64 \
|
||||
-DVCPKG_TARGET_TRIPLET=x64-osx \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_SURVEY_KEY=${{ inputs.survey_key }} \
|
||||
-DCPACK_BUNDLE_APPLE_CERT_APP=${{ secrets.APPLE_DEVELOPER_CERTIFICATE_ID }} \
|
||||
"-DCPACK_BUNDLE_APPLE_CODESIGN_PARAMETER=--deep -f --options runtime" \
|
||||
-DAPPLE_UNIVERSAL_PACKAGE=1 \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu) --target openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create breakpad symbols
|
||||
run: |
|
||||
cd build-x64
|
||||
mkdir dSYM
|
||||
dsymutil ./openttd -o dSYM/openttd
|
||||
dump_syms ./dSYM/openttd --inlines --store symbols
|
||||
|
||||
cd ../build-arm64
|
||||
mkdir dSYM
|
||||
dsymutil ./openttd -o dSYM/openttd
|
||||
dump_syms ./dSYM/openttd --inlines --store ../build-x64/symbols
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
cd build-x64
|
||||
|
||||
echo "::group::Create universal binary"
|
||||
# Combine the `openttd` binaries from each build into a single file
|
||||
lipo -create -output openttd-universal ../build-*/openttd
|
||||
mv openttd-universal openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourselves at
|
||||
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Notarize
|
||||
env:
|
||||
AC_USERNAME: ${{ secrets.APPLE_DEVELOPER_APP_USERNAME }}
|
||||
AC_PASSWORD: ${{ secrets.APPLE_DEVELOPER_APP_PASSWORD }}
|
||||
AC_TEAM_ID: ${{ secrets.APPLE_DEVELOPER_TEAM_ID }}
|
||||
run: |
|
||||
if [ -z "${AC_USERNAME}" ]; then
|
||||
# We may be running on a fork that doesn't have notarization secrets set up; skip this step
|
||||
echo No notarization secrets set up, skipping.
|
||||
exit 0
|
||||
fi
|
||||
|
||||
xcrun notarytool store-credentials --apple-id "${AC_USERNAME}" --password "${AC_PASSWORD}" --team-id "${AC_TEAM_ID}" openttd
|
||||
cd build-x64
|
||||
../os/macosx/notarize.sh
|
||||
|
||||
- name: Build zip
|
||||
run: |
|
||||
cd build-x64
|
||||
|
||||
pushd _CPack_Packages/*/Bundle/openttd-*/
|
||||
|
||||
# Remove the Applications symlink from the staging folder
|
||||
rm -f Applications
|
||||
|
||||
# Remove the original dmg built by CPack to avoid a conflict when resolving
|
||||
# the zip_filename variable below
|
||||
rm -f ../*.dmg
|
||||
|
||||
zip_filename=(../openttd-*)
|
||||
|
||||
# Package up the existing, notarised .app into a zip file
|
||||
zip -r -9 ${zip_filename}.zip OpenTTD.app
|
||||
|
||||
popd
|
||||
|
||||
# Now move it into place to be uploaded
|
||||
mv _CPack_Packages/*/Bundle/openttd-*.zip bundles/
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openttd-macos-universal
|
||||
path: build-x64/bundles
|
||||
retention-days: 5
|
||||
|
||||
- name: Store symbols
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: symbols-macos-universal
|
||||
path: build-x64/symbols
|
||||
retention-days: 5
|
207
.github/workflows/release-source.yml
vendored
@@ -1,207 +0,0 @@
|
||||
name: Release (Source)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
outputs:
|
||||
version:
|
||||
value: ${{ jobs.source.outputs.version }}
|
||||
is_tag:
|
||||
value: ${{ jobs.source.outputs.is_tag }}
|
||||
trigger_type:
|
||||
value: ${{ jobs.source.outputs.trigger_type }}
|
||||
folder:
|
||||
value: ${{ jobs.source.outputs.folder }}
|
||||
survey_key:
|
||||
value: ${{ jobs.source.outputs.survey_key }}
|
||||
|
||||
jobs:
|
||||
source:
|
||||
name: Source
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
outputs:
|
||||
version: ${{ steps.metadata.outputs.version }}
|
||||
is_tag: ${{ steps.metadata.outputs.is_tag }}
|
||||
trigger_type: ${{ steps.metadata.outputs.trigger_type }}
|
||||
folder: ${{ steps.metadata.outputs.folder }}
|
||||
survey_key: ${{ steps.survey_key.outputs.survey_key }}
|
||||
|
||||
steps:
|
||||
- name: Checkout (Release)
|
||||
if: github.event_name == 'release'
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
# We generate a changelog; for this we need the full git log.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout (Manual)
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
# We generate a changelog; for this we need the full git log.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout (Trigger)
|
||||
if: github.event_name == 'repository_dispatch'
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.client_payload.ref }}
|
||||
# We generate a changelog; for this we need the full git log.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check valid branch name
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
REF="${{ github.event.inputs.ref }}"
|
||||
elif [ "${{ github.event_name }}" = "repository_dispatch" ]; then
|
||||
REF="${{ github.event.client_payload.ref }}"
|
||||
else
|
||||
REF="${{ github.ref }}"
|
||||
fi
|
||||
|
||||
# Check if we are a tag.
|
||||
if [ -n "$(git name-rev --name-only --tags --no-undefined HEAD 2>/dev/null || false)" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Check if the checkout caused the branch to be named.
|
||||
if [ "$(git rev-parse --abbrev-ref HEAD)" != "HEAD" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Check if this was a pull request.
|
||||
if [ -n "$(echo ${REF} | grep '^refs/pull/[0-9]*')" ]; then
|
||||
PULL=$(echo ${REF} | cut -d/ -f3)
|
||||
git checkout -b pr${PULL}
|
||||
fi
|
||||
|
||||
# Are we still in a detached state? Error out.
|
||||
if [ "$(git rev-parse --abbrev-ref HEAD)" == "HEAD" ]; then
|
||||
echo "The 'ref' given resulted in a checkout of a detached HEAD."
|
||||
echo "We cannot detect the version for these checkout accurate."
|
||||
echo ""
|
||||
echo "If you want to build a Pull Request, make sure you use 'refs/pull/NNN/head'."
|
||||
echo ""
|
||||
echo "Cancelling build, as without a version we cannot store the artifacts."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Generate metadata
|
||||
id: metadata
|
||||
run: |
|
||||
echo "::group::Prepare metadata files"
|
||||
cmake -DGENERATE_OTTDREV=1 -P cmake/scripts/FindVersion.cmake
|
||||
./.github/changelog.sh > .changelog
|
||||
TZ='UTC' date +"%Y-%m-%d %H:%M UTC" > .release_date
|
||||
cat .ottdrev | cut -f 1 -d$'\t' > .version
|
||||
|
||||
if [ $(cat .ottdrev | cut -f 5 -d$'\t') = '1' ]; then
|
||||
# Assume that all tags are always releases. Why else make a tag?
|
||||
IS_TAG="true"
|
||||
|
||||
FOLDER="${{ env.FOLDER_RELEASES }}"
|
||||
TRIGGER_TYPE="new-tag"
|
||||
else
|
||||
IS_TAG="false"
|
||||
|
||||
BRANCH=$(git symbolic-ref -q HEAD | sed 's@.*/@@')
|
||||
if [ -z "${BRANCH}" ]; then
|
||||
echo "Internal error: branch name is empty."
|
||||
echo "An earlier step should have prevented this from happening."
|
||||
echo "Cancelling build, as without a branch name we cannot store the artifacts"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${BRANCH}" = "${{ env.NIGHTLIES_BRANCH }}" ]; then
|
||||
# The "master" branch is special, and we call a nightly.
|
||||
FOLDER="${{ env.FOLDER_NIGHTLIES }}/$(date +%Y)"
|
||||
TRIGGER_TYPE="new-master"
|
||||
else
|
||||
# All other branches, which can be builds of Pull Requests, are
|
||||
# put in their own folder.
|
||||
FOLDER="${{ env.FOLDER_BRANCHES }}/${BRANCH}"
|
||||
TRIGGER_TYPE="new-branch"
|
||||
fi
|
||||
fi
|
||||
|
||||
mkdir -p build/bundles
|
||||
cp .changelog build/bundles/changelog.txt
|
||||
cp .release_date build/bundles/released.txt
|
||||
cp README.md build/bundles/README.md
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "Release Date: $(cat .release_date)"
|
||||
echo "Revision: $(cat .ottdrev)"
|
||||
echo "Version: $(cat .version)"
|
||||
echo "Is tag: ${IS_TAG}"
|
||||
echo "Folder on CDN: ${FOLDER}"
|
||||
echo "Workflow trigger: ${TRIGGER_TYPE}"
|
||||
|
||||
echo "version=$(cat .version)" >> $GITHUB_OUTPUT
|
||||
echo "is_tag=${IS_TAG}" >> $GITHUB_OUTPUT
|
||||
echo "folder=${FOLDER}" >> $GITHUB_OUTPUT
|
||||
echo "trigger_type=${TRIGGER_TYPE}" >> $GITHUB_OUTPUT
|
||||
env:
|
||||
NIGHTLIES_BRANCH: master
|
||||
FOLDER_RELEASES: openttd-releases
|
||||
FOLDER_NIGHTLIES: openttd-nightlies
|
||||
FOLDER_BRANCHES: openttd-branches
|
||||
|
||||
- name: Generate survey key
|
||||
id: survey_key
|
||||
run: |
|
||||
if [ -z "${{ vars.SURVEY_TYPE }}" ]; then
|
||||
echo "SURVEY_TYPE variable not found; most likely running in a fork. Skipping step."
|
||||
SURVEY_KEY=""
|
||||
else
|
||||
PAYLOAD='{"version":"${{ steps.metadata.outputs.version }}","type":"${{ vars.SURVEY_TYPE }}"}'
|
||||
|
||||
echo "${{ secrets.SURVEY_SIGNING_KEY }}" > survey_signing_key.pem
|
||||
SIGNATURE=$(echo -n "${PAYLOAD}" | openssl dgst -sha256 -sign survey_signing_key.pem | base64 -w0)
|
||||
rm -f survey_signing_key.pem
|
||||
|
||||
SURVEY_KEY=$(curl -f -s -X POST -d "${PAYLOAD}" -H "Content-Type: application/json" -H "X-Signature: ${SIGNATURE}" https://survey-participate.openttd.org/create-survey-key/${{ vars.SURVEY_TYPE }})
|
||||
fi
|
||||
|
||||
echo "survey_key=${SURVEY_KEY}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Remove VCS information
|
||||
run: |
|
||||
rm -rf .git
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
FOLDER_NAME=openttd-${{ steps.metadata.outputs.version }}
|
||||
|
||||
# Rename the folder to openttd-NNN
|
||||
mkdir ${FOLDER_NAME}
|
||||
find . -maxdepth 1 -not -name . -not -name build -not -name ${FOLDER_NAME} -exec mv {} ${FOLDER_NAME}/ \;
|
||||
|
||||
echo "::group::Create tarball (xz) bundle"
|
||||
tar --xz -cvf build/bundles/${FOLDER_NAME}-source.tar.xz ${FOLDER_NAME}
|
||||
echo "::endgroup::"
|
||||
|
||||
# This tarball is only to be used within this workflow.
|
||||
echo "::group::Create tarball (gz) bundle"
|
||||
tar --gzip -cvf source.tar.gz ${FOLDER_NAME}
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Create zip bundle"
|
||||
zip -9 -r build/bundles/${FOLDER_NAME}-source.zip ${FOLDER_NAME}
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openttd-source
|
||||
path: build/bundles/*
|
||||
retention-days: 5
|
||||
|
||||
- name: Store source (for other jobs)
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
path: source.tar.gz
|
||||
retention-days: 1
|
192
.github/workflows/release-windows-store.yml
vendored
@@ -1,192 +0,0 @@
|
||||
name: Release (Windows Store)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
windows-store:
|
||||
name: Windows Store
|
||||
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
shell: bash
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Download x86 build
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-x86
|
||||
|
||||
- name: Download x64 build
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-x64
|
||||
|
||||
- name: Download arm64 build
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-arm64
|
||||
|
||||
- name: Unpack builds
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir builds
|
||||
cd builds
|
||||
|
||||
function extract {
|
||||
mkdir $1
|
||||
|
||||
# Extract the zip version of the release
|
||||
unzip ../openttd-*-windows-$2.zip -d $1
|
||||
|
||||
# Remove the extraneous directory
|
||||
mv $1/openttd-*-windows-$2/* $1/
|
||||
rmdir $1/openttd-*-windows-$2
|
||||
|
||||
# Move the openttd.exe to the '{arch}-binaries' folder
|
||||
mkdir $1-binaries
|
||||
mv $1/openttd.exe $1-binaries/
|
||||
}
|
||||
|
||||
extract x86 win32
|
||||
extract x64 win64
|
||||
extract arm64 arm64
|
||||
|
||||
# Use the "x86" folder as the source of the common binaries (lang files, etc) and remove the others
|
||||
mv x86 common-binaries
|
||||
rm -rf x64 arm64
|
||||
|
||||
- name: Install OpenGFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p builds/common-binaries/baseset
|
||||
cd builds/common-binaries/baseset
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/7.1/opengfx-7.1-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
tar xf opengfx-*.tar
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip opengfx-*.tar
|
||||
|
||||
- name: Install OpenMSX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p builds/common-binaries/baseset
|
||||
cd builds/common-binaries/baseset
|
||||
|
||||
echo "::group::Download OpenMSX"
|
||||
curl -L https://cdn.openttd.org/openmsx-releases/0.4.2/openmsx-0.4.2-all.zip -o openmsx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip openmsx-all.zip
|
||||
tar xf openmsx-*.tar
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f openmsx-all.zip openmsx-*.tar
|
||||
|
||||
- name: Install OpenSFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p builds/common-binaries/baseset/opensfx
|
||||
cd builds/common-binaries/baseset/opensfx
|
||||
|
||||
echo "::group::Download OpenSFX"
|
||||
curl -L https://cdn.openttd.org/opensfx-releases/1.0.3/opensfx-1.0.3-all.zip -o opensfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenSFX"
|
||||
unzip opensfx-all.zip
|
||||
tar xf opensfx-*.tar
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opensfx-all.zip opensfx-*.tar
|
||||
|
||||
- name: Generate signing certificate
|
||||
shell: cmd
|
||||
run: |
|
||||
cd builds
|
||||
|
||||
REM We need to provide a signed .appx to the Windows Store, so generate a certificate with password 'password'
|
||||
call ..\os\windows\winstore\generate-key.bat "CN=78024DA8-4BE4-4C77-B12E-547BBF7359D2" password cert.pfx
|
||||
|
||||
- name: Generate assets
|
||||
shell: cmd
|
||||
run: |
|
||||
cd os\windows\winstore
|
||||
call generate-assets.bat
|
||||
|
||||
- name: Prepare manifests
|
||||
shell: cmd
|
||||
run: |
|
||||
cd builds
|
||||
mkdir manifests
|
||||
|
||||
REM Set the version environment variable
|
||||
call ..\os\windows\winstore\set-version.bat x86-binaries\openttd.exe
|
||||
|
||||
call ..\os\windows\winstore\prepare-manifests.bat manifests "CN=78024DA8-4BE4-4C77-B12E-547BBF7359D2" "57420OpenTTDDevelopers.OpenTTDofficial"
|
||||
|
||||
- name: Prepare binaries
|
||||
shell: bash
|
||||
run: |
|
||||
cd builds
|
||||
|
||||
# Copy the Windows Store assets
|
||||
mkdir common-binaries/Assets
|
||||
cp -R ../os/windows/winstore/assets-common/* common-binaries/Assets/
|
||||
|
||||
mkdir Assets
|
||||
cp -R ../os/windows/winstore/assets/* Assets/
|
||||
|
||||
cp manifests/*.xml .
|
||||
|
||||
- name: Build
|
||||
shell: cmd
|
||||
run: |
|
||||
REM Add the Windows SDK tools to the PATH
|
||||
|
||||
echo|set /p="SET VS_INSTALLDIR=" > _vspath.bat
|
||||
vswhere -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath >> _vspath.bat
|
||||
call _vspath.bat
|
||||
call "%VS_INSTALLDIR%\Common7\Tools\VsDevCmd.bat"
|
||||
|
||||
REM Set the version environment variable
|
||||
call os\windows\winstore\set-version.bat builds\x86-binaries\openttd.exe
|
||||
|
||||
cd builds
|
||||
mkdir output
|
||||
|
||||
REM Build and sign the package
|
||||
makeappx build /v /f PackagingLayout.xml /op output\ /bv %OTTD_VERSION% /pv %OTTD_VERSION% /ca
|
||||
SignTool sign /fd sha256 /a /f cert.pfx /p password "output\OpenTTD.appxbundle"
|
||||
|
||||
REM Move resulting files to bundles folder
|
||||
mkdir bundles
|
||||
mkdir bundles\internal
|
||||
move cert.pfx bundles\internal\openttd-${{ inputs.version }}-windows-store.pfx
|
||||
move output\OpenTTD.appxbundle bundles\internal\openttd-${{ inputs.version }}-windows-store.appxbundle
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-store
|
||||
path: builds/bundles
|
||||
retention-days: 5
|
243
.github/workflows/release-windows.yml
vendored
@@ -1,243 +0,0 @@
|
||||
name: Release (Windows)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
survey_key:
|
||||
required: false
|
||||
type: string
|
||||
default: ""
|
||||
is_tag:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
windows:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: x86
|
||||
host: x86
|
||||
- arch: x64
|
||||
host: x64
|
||||
- arch: arm64
|
||||
host: x64_arm64
|
||||
|
||||
name: Windows (${{ matrix.arch }})
|
||||
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
shell: bash
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Enable Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
echo "::group::Install choco dependencies"
|
||||
choco install pandoc
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install breakpad dependencies"
|
||||
cargo install dump_syms
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
shell: powershell
|
||||
run: |
|
||||
# Work around caching failure with GNU tar
|
||||
New-Item -Type Junction -Path vcpkg -Target c:\vcpkg
|
||||
|
||||
Write-Output "image=$env:ImageOS-$env:ImageVersion" >> $env:GITHUB_OUTPUT
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}-1 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}
|
||||
|
||||
- name: Prepare vcpkg
|
||||
shell: bash
|
||||
run: |
|
||||
vcpkg install --triplet=${{ matrix.arch }}-windows-static \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
nlohmann-json \
|
||||
zlib \
|
||||
# EOF
|
||||
|
||||
# arm64-windows-static is not (yet) supported for breakpad.
|
||||
if [ "${{ matrix.arch }}" != "arm64" ]; then
|
||||
vcpkg install --triplet=${{ matrix.arch }}-windows-static \
|
||||
breakpad \
|
||||
# EOF
|
||||
fi
|
||||
|
||||
- name: Install MSVC problem matcher
|
||||
uses: ammaraskar/msvc-problem-matcher@master
|
||||
|
||||
- name: Configure developer command prompt for tools
|
||||
uses: ilammy/msvc-dev-cmd@v1
|
||||
with:
|
||||
arch: x64
|
||||
|
||||
- name: Build tools
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-GNinja \
|
||||
-DOPTION_TOOLS_ONLY=ON \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build . --target tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Configure developer command prompt for ${{ matrix.arch }}
|
||||
uses: ilammy/msvc-dev-cmd@v1
|
||||
with:
|
||||
arch: ${{ matrix.host }}
|
||||
|
||||
- name: Import code signing certificate
|
||||
shell: powershell
|
||||
# If this is run on a fork, there may not be a certificate set up - continue in this case
|
||||
continue-on-error: true
|
||||
run: |
|
||||
$tempFile = [System.IO.Path]::GetTempFileName()
|
||||
$bytes = [System.Convert]::FromBase64String($env:WINDOWS_CERTIFICATE_P12)
|
||||
[IO.File]::WriteAllBytes($tempFile, $bytes)
|
||||
$pwd = ConvertTo-SecureString $env:WINDOWS_CERTIFICATE_PASSWORD -AsPlainText -Force
|
||||
Import-PfxCertificate -FilePath $tempFile -CertStoreLocation Cert:\CurrentUser\My -Password $pwd
|
||||
Remove-Item $tempFile
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_P12: ${{ secrets.WINDOWS_CERTIFICATE_P12 }}
|
||||
WINDOWS_CERTIFICATE_PASSWORD: ${{ secrets.WINDOWS_CERTIFICATE_PASSWORD }}
|
||||
|
||||
- name: Build (with installer)
|
||||
if: inputs.is_tag == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-GNinja \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-windows-static \
|
||||
-DCMAKE_TOOLCHAIN_FILE="c:\vcpkg\scripts\buildsystems\vcpkg.cmake" \
|
||||
-DOPTION_USE_NSIS=ON \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_SURVEY_KEY=${{ inputs.survey_key }} \
|
||||
-DWINDOWS_CERTIFICATE_COMMON_NAME="${WINDOWS_CERTIFICATE_COMMON_NAME}" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build . --target openttd
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_COMMON_NAME: ${{ secrets.WINDOWS_CERTIFICATE_COMMON_NAME }}
|
||||
|
||||
- name: Build (without installer)
|
||||
if: inputs.is_tag != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-GNinja \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-windows-static \
|
||||
-DCMAKE_TOOLCHAIN_FILE="c:\vcpkg\scripts\buildsystems\vcpkg.cmake" \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_SURVEY_KEY=${{ inputs.survey_key }} \
|
||||
-DWINDOWS_CERTIFICATE_COMMON_NAME="${WINDOWS_CERTIFICATE_COMMON_NAME}" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build . --target openttd
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_COMMON_NAME: ${{ secrets.WINDOWS_CERTIFICATE_COMMON_NAME }}
|
||||
|
||||
- name: Create breakpad symbols
|
||||
shell: bash
|
||||
run: |
|
||||
cd build
|
||||
dump_syms openttd.pdb --inlines --store symbols
|
||||
|
||||
- name: Create bundles
|
||||
shell: bash
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Move PDB and exe to symbols"
|
||||
PDB_FOLDER=$(find symbols -mindepth 2 -type d)
|
||||
cp openttd.pdb ${PDB_FOLDER}/
|
||||
|
||||
EXE_FOLDER=symbols/openttd.exe/$(grep "INFO CODE_ID" symbols/*/*/openttd.sym | cut -d\ -f3)
|
||||
mkdir -p ${EXE_FOLDER}
|
||||
cp openttd.exe ${EXE_FOLDER}/
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourselves at
|
||||
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Sign installer
|
||||
if: inputs.is_tag == 'true'
|
||||
shell: bash
|
||||
# If this is run on a fork, there may not be a certificate set up - continue in this case
|
||||
continue-on-error: true
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build/bundles
|
||||
../../os/windows/sign.bat *.exe "${WINDOWS_CERTIFICATE_COMMON_NAME}"
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_COMMON_NAME: ${{ secrets.WINDOWS_CERTIFICATE_COMMON_NAME }}
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-${{ matrix.arch }}
|
||||
path: build/bundles
|
||||
retention-days: 5
|
||||
|
||||
- name: Store symbols
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: symbols-windows-${{ matrix.arch }}
|
||||
path: build/symbols
|
||||
retention-days: 5
|
132
.github/workflows/release.yml
vendored
@@ -1,132 +0,0 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: 'Ref to build (for Pull Requests, use refs/pull/NNN/head)'
|
||||
required: true
|
||||
repository_dispatch:
|
||||
# client_payload should be the same as the inputs for workflow_dispatch.
|
||||
types:
|
||||
- Build*
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
|
||||
jobs:
|
||||
source:
|
||||
name: Source
|
||||
|
||||
uses: ./.github/workflows/release-source.yml
|
||||
secrets: inherit
|
||||
|
||||
docs:
|
||||
name: Docs
|
||||
needs: source
|
||||
|
||||
uses: ./.github/workflows/release-docs.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
version: ${{ needs.source.outputs.version }}
|
||||
|
||||
linux:
|
||||
name: Linux (Generic)
|
||||
needs: source
|
||||
|
||||
uses: ./.github/workflows/release-linux.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
survey_key: ${{ needs.source.outputs.survey_key }}
|
||||
|
||||
macos:
|
||||
name: MacOS
|
||||
needs: source
|
||||
|
||||
uses: ./.github/workflows/release-macos.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
survey_key: ${{ needs.source.outputs.survey_key }}
|
||||
|
||||
windows:
|
||||
name: Windows
|
||||
needs: source
|
||||
|
||||
uses: ./.github/workflows/release-windows.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
is_tag: ${{ needs.source.outputs.is_tag }}
|
||||
survey_key: ${{ needs.source.outputs.survey_key }}
|
||||
|
||||
windows-store:
|
||||
name: Windows Store
|
||||
needs:
|
||||
- source
|
||||
- windows
|
||||
|
||||
if: needs.source.outputs.is_tag == 'true'
|
||||
|
||||
uses: ./.github/workflows/release-windows-store.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
version: ${{ needs.source.outputs.version }}
|
||||
|
||||
upload-cdn:
|
||||
name: Upload (CDN)
|
||||
needs:
|
||||
- source
|
||||
- docs
|
||||
- linux
|
||||
- macos
|
||||
- windows
|
||||
- windows-store
|
||||
|
||||
# As windows-store is conditional, we need to check ourselves whether we need to run.
|
||||
# The always() makes sure the rest is always evaluated.
|
||||
if: always() && needs.source.result == 'success' && needs.docs.result == 'success' && needs.linux.result == 'success' && needs.macos.result == 'success' && needs.windows.result == 'success' && (needs.windows-store.result == 'success' || needs.windows-store.result == 'skipped')
|
||||
|
||||
uses: ./.github/workflows/upload-cdn.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
version: ${{ needs.source.outputs.version }}
|
||||
folder: ${{ needs.source.outputs.folder }}
|
||||
trigger_type: ${{ needs.source.outputs.trigger_type }}
|
||||
|
||||
upload-steam:
|
||||
name: Upload (Steam)
|
||||
needs:
|
||||
- source
|
||||
- linux
|
||||
- macos
|
||||
- windows
|
||||
|
||||
if: needs.source.outputs.trigger_type == 'new-master' || needs.source.outputs.trigger_type == 'new-tag'
|
||||
|
||||
uses: ./.github/workflows/upload-steam.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
version: ${{ needs.source.outputs.version }}
|
||||
trigger_type: ${{ needs.source.outputs.trigger_type }}
|
||||
|
||||
upload-gog:
|
||||
name: Upload (GOG)
|
||||
needs:
|
||||
- source
|
||||
- linux
|
||||
- macos
|
||||
- windows
|
||||
|
||||
if: needs.source.outputs.trigger_type == 'new-tag'
|
||||
|
||||
uses: ./.github/workflows/upload-gog.yml
|
||||
secrets: inherit
|
||||
|
||||
with:
|
||||
version: ${{ needs.source.outputs.version }}
|
@@ -1,22 +0,0 @@
|
||||
name: Script missing mode enforcement
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
|
||||
|
||||
jobs:
|
||||
script-missing-mode-enforcement:
|
||||
name: Script missing mode enforcement
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Check for finding script functions that require company/deity mode enforcement/checks
|
||||
run: |
|
||||
set -ex
|
||||
python3 .github/script-missing-mode-enforcement.py
|
22
.github/workflows/unused-strings.yml
vendored
@@ -1,22 +0,0 @@
|
||||
name: Unused strings
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
|
||||
|
||||
jobs:
|
||||
unused-strings:
|
||||
name: Unused strings
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Check for unused strings
|
||||
run: |
|
||||
set -ex
|
||||
python3 .github/unused-strings.py
|
137
.github/workflows/upload-cdn.yml
vendored
@@ -1,137 +0,0 @@
|
||||
name: Upload (CDN)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
folder:
|
||||
required: true
|
||||
type: string
|
||||
trigger_type:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
name: Prepare
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Download all bundles
|
||||
uses: actions/download-artifact@v3
|
||||
|
||||
- name: Calculate checksums
|
||||
run: |
|
||||
echo "::group::Move bundles to a single folder"
|
||||
mkdir bundles
|
||||
mv openttd-*/* bundles/
|
||||
echo "::endgroup::"
|
||||
|
||||
cd bundles
|
||||
for i in $(ls openttd-*); do
|
||||
echo "::group::Calculating checksums for ${i}"
|
||||
openssl dgst -r -md5 -hex $i > $i.md5sum
|
||||
openssl dgst -r -sha1 -hex $i > $i.sha1sum
|
||||
openssl dgst -r -sha256 -hex $i > $i.sha256sum
|
||||
echo "::endgroup::"
|
||||
done
|
||||
|
||||
# Some targets generate files that are meant for our eyes only.
# They are stored in the "internal" folder, which contains bundles
# for targets like the Windows Store. No user benefits from knowing
# they exist, hence: internal.
|
||||
if [ -e internal ]; then
|
||||
cd internal
|
||||
for i in $(ls openttd-*); do
|
||||
echo "::group::Calculating checksums for ${i}"
|
||||
openssl dgst -r -md5 -hex $i > $i.md5sum
|
||||
openssl dgst -r -sha1 -hex $i > $i.sha1sum
|
||||
openssl dgst -r -sha256 -hex $i > $i.sha256sum
|
||||
echo "::endgroup::"
|
||||
done
|
||||
fi
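A downloaded bundle can later be checked against these files; a sketch with a placeholder file name, relying on the -r output of openssl dgst matching the format sha256sum -c expects.
    # Not part of the workflow; verify a bundle against its generated checksum file.
    sha256sum -c openttd-<version>-<target>.tar.xz.sha256sum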
|
||||
|
||||
- name: Merge symbols
|
||||
run: |
|
||||
mkdir symbols
|
||||
cp -R symbols-*/* symbols/
|
||||
|
||||
# Compress all files as gzip, to reduce cost of storage on the CDN.
|
||||
for i in $(find symbols -mindepth 2 -type f); do
|
||||
gzip ${i}
|
||||
done
|
||||
|
||||
# Leave a mark in each folder recording which version actually created the symbol file.
|
||||
for i in $(find symbols -mindepth 2 -type d); do
|
||||
touch ${i}/.${{ inputs.version }}.txt
|
||||
done
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: cdn-bundles
|
||||
path: bundles/*
|
||||
retention-days: 5
|
||||
|
||||
- name: Store breakpad symbols
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: cdn-symbols
|
||||
path: symbols/*
|
||||
retention-days: 5
|
||||
|
||||
publish-bundles:
|
||||
needs:
|
||||
- prepare
|
||||
|
||||
name: Publish bundles
|
||||
uses: OpenTTD/actions/.github/workflows/rw-cdn-upload.yml@v4
|
||||
secrets:
|
||||
CDN_SIGNING_KEY: ${{ secrets.CDN_SIGNING_KEY }}
|
||||
DEPLOYMENT_APP_ID: ${{ secrets.DEPLOYMENT_APP_ID }}
|
||||
DEPLOYMENT_APP_PRIVATE_KEY: ${{ secrets.DEPLOYMENT_APP_PRIVATE_KEY }}
|
||||
with:
|
||||
artifact-name: cdn-bundles
|
||||
folder: ${{ inputs.folder }}
|
||||
version: ${{ inputs.version }}
|
||||
|
||||
publish-symbols:
|
||||
needs:
|
||||
- prepare
|
||||
|
||||
name: Publish symbols
|
||||
uses: OpenTTD/actions/.github/workflows/rw-symbols-upload.yml@v4
|
||||
secrets:
|
||||
SYMBOLS_SIGNING_KEY: ${{ secrets.SYMBOLS_SIGNING_KEY }}
|
||||
with:
|
||||
artifact-name: cdn-symbols
|
||||
repository: OpenTTD
|
||||
|
||||
docs:
|
||||
if: ${{ inputs.trigger_type == 'new-master' }}
|
||||
needs:
|
||||
- publish-bundles
|
||||
|
||||
name: Publish docs
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Generate access token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@v1
|
||||
with:
|
||||
app_id: ${{ secrets.DEPLOYMENT_APP_ID }}
|
||||
private_key: ${{ secrets.DEPLOYMENT_APP_PRIVATE_KEY }}
|
||||
repository: OpenTTD/workflows
|
||||
|
||||
- name: Trigger 'Publish Docs'
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
repository: OpenTTD/workflows
|
||||
event-type: publish-docs
|
||||
client-payload: '{"version": "${{ inputs.version }}", "folder": "${{ inputs.folder }}"}'
|
151
.github/workflows/upload-gog.yml
vendored
151
.github/workflows/upload-gog.yml
vendored
@@ -1,151 +0,0 @@
|
||||
name: Upload (GOG)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
name: Upload (GOG)
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
path: internal-source
|
||||
|
||||
- name: Download bundle (Windows x86)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-x86
|
||||
path: openttd-windows-x86
|
||||
|
||||
- name: Download bundle (Windows x64)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-x64
|
||||
path: openttd-windows-x64
|
||||
|
||||
- name: Download bundle (MacOS)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-macos-universal
|
||||
path: openttd-macos-universal
|
||||
|
||||
- name: Download bundle (Linux)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-linux-generic
|
||||
path: openttd-linux-generic
|
||||
|
||||
- name: Install GOG Galaxy Build Creator
|
||||
run: |
|
||||
wget https://cdn.gog.com/open/galaxy/pipeline/build_creator/gnu-linux/GOGGalaxyBuildCreator-1.4.0.AppImage
|
||||
7z x GOGGalaxyBuildCreator-1.4.0.AppImage
|
||||
chmod +x ./app/GOGGalaxyPipelineBuilder
|
||||
|
||||
- name: Install OpenGFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p gog/opengfx/baseset
|
||||
cd gog/opengfx/baseset
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/7.1/opengfx-7.1-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install OpenMSX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p gog/openmsx/baseset
|
||||
cd gog/openmsx/baseset
|
||||
|
||||
echo "::group::Download OpenMSX"
|
||||
curl -L https://cdn.openttd.org/openmsx-releases/0.4.2/openmsx-0.4.2-all.zip -o openmsx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip openmsx-all.zip
|
||||
tar xf openmsx-*.tar
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f openmsx-all.zip openmsx-*.tar
|
||||
|
||||
- name: Install OpenSFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p gog/opensfx/baseset
|
||||
cd gog/opensfx/baseset
|
||||
|
||||
echo "::group::Download OpenSFX"
|
||||
curl -L https://cdn.openttd.org/opensfx-releases/1.0.3/opensfx-1.0.3-all.zip -o opensfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenSFX"
|
||||
unzip opensfx-all.zip
|
||||
tar xf opensfx-*.tar
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opensfx-all.zip opensfx-*.tar
|
||||
|
||||
- name: Upload to GOG
|
||||
run: |
|
||||
echo "::group::Extracting source"
|
||||
mkdir source
|
||||
(
|
||||
cd source
|
||||
tar -xf ../internal-source/source.tar.gz --strip-components=1
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
(
|
||||
cd gog
|
||||
|
||||
echo "::group::Prepare Win32"
|
||||
unzip ../openttd-windows-x86/openttd-*-windows-win32.zip
|
||||
mv openttd-*-windows-win32 win32
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare Win64"
|
||||
unzip ../openttd-windows-x64/openttd-*-windows-win64.zip
|
||||
mv openttd-*-windows-win64 win64
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare macOS"
|
||||
mkdir macos
|
||||
(
|
||||
cd macos
|
||||
unzip ../../openttd-macos-universal/openttd-*-macos-universal.zip
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare Linux"
|
||||
tar xvf ../openttd-linux-generic/openttd-*-linux-generic-amd64.tar.xz
|
||||
mv openttd-*-linux-generic-amd64 linux
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Preparing build files"
|
||||
cp ../source/os/gog/*.json .
|
||||
for json in $(ls *.json); do
|
||||
sed -i 's/VERSION/${{ inputs.version }}/g;s/CLIENT_ID/${{ secrets.GOG_CLIENT_ID }}/g;s/CLIENT_SECRET/${{ secrets.GOG_CLIENT_SECRET }}/g' ${json}
|
||||
done
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Upload to GOG"
|
||||
../app/GOGGalaxyPipelineBuilder build-game --username "${{ secrets.GOG_USERNAME }}" --password "${{ secrets.GOG_PASSWORD }}" --branch Testing windows.json
|
||||
../app/GOGGalaxyPipelineBuilder build-game --username "${{ secrets.GOG_USERNAME }}" --password "${{ secrets.GOG_PASSWORD }}" --branch Testing macos.json
|
||||
../app/GOGGalaxyPipelineBuilder build-game --username "${{ secrets.GOG_USERNAME }}" --password "${{ secrets.GOG_PASSWORD }}" --branch Testing linux.json
|
||||
echo "::endgroup::"
|
||||
)
|
109
.github/workflows/upload-steam.yml
vendored
109
.github/workflows/upload-steam.yml
vendored
@@ -1,109 +0,0 @@
|
||||
name: Upload (Steam)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
trigger_type:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
name: Upload (Steam)
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: internal-source
|
||||
path: internal-source
|
||||
|
||||
- name: Download bundle (Windows x86)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-x86
|
||||
path: openttd-windows-x86
|
||||
|
||||
- name: Download bundle (Windows x64)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-windows-x64
|
||||
path: openttd-windows-x64
|
||||
|
||||
- name: Download bundle (MacOS)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-macos-universal
|
||||
path: openttd-macos-universal
|
||||
|
||||
- name: Download bundle (Linux)
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: openttd-linux-generic
|
||||
path: openttd-linux-generic
|
||||
|
||||
- name: Setup steamcmd
|
||||
uses: CyberAndrii/setup-steamcmd@v1
|
||||
|
||||
- name: Generate Steam auth code
|
||||
id: steam-totp
|
||||
uses: CyberAndrii/steam-totp@v1
|
||||
with:
|
||||
shared_secret: ${{ secrets.STEAM_SHARED_SECRET }}
|
||||
|
||||
- name: Upload to Steam
|
||||
run: |
|
||||
echo "::group::Extracting source"
|
||||
mkdir source
|
||||
(
|
||||
cd source
|
||||
tar -xf ../internal-source/source.tar.gz --strip-components=1
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
mkdir steam
|
||||
(
|
||||
cd steam
|
||||
|
||||
echo "::group::Prepare Win32"
|
||||
unzip ../openttd-windows-x86/openttd-*-windows-win32.zip
|
||||
mv openttd-*-windows-win32 steam-win32
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare Win64"
|
||||
unzip ../openttd-windows-x64/openttd-*-windows-win64.zip
|
||||
mv openttd-*-windows-win64 steam-win64
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare macOS"
|
||||
mkdir steam-macos
|
||||
(
|
||||
cd steam-macos
|
||||
unzip ../../openttd-macos-universal/openttd-*-macos-universal.zip
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare Linux"
|
||||
tar xvf ../openttd-linux-generic/openttd-*-linux-generic-amd64.tar.xz
|
||||
mv openttd-*-linux-generic-amd64 steam-linux
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Preparing build file"
|
||||
if [ "${{ inputs.trigger_type }}" = "new-tag" ]; then
|
||||
BRANCH="testing"
|
||||
else
|
||||
BRANCH="nightly"
|
||||
fi
|
||||
cat ../source/os/steam/release.vdf | sed 's/@@DESCRIPTION@@/openttd-${{ inputs.version }}/;s/@@BRANCH@@/'${BRANCH}'/' > release.vdf
|
||||
cat release.vdf
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Upload to Steam"
|
||||
steamcmd +login ${{ secrets.STEAM_USERNAME }} ${{ secrets.STEAM_PASSWORD }} ${{ steps.steam-totp.outputs.code }} +run_app_build $(pwd)/release.vdf +quit
|
||||
echo "::endgroup::"
|
||||
)
|
24
.github/workflows/windowdesc-ini-key.yml
vendored
24
.github/workflows/windowdesc-ini-key.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: WindowDesc ini_key
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
|
||||
|
||||
jobs:
|
||||
windowdesc-ini-key:
|
||||
name: WindowDesc ini_key issues
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Check for ini_key issues in WindowDesc entries
|
||||
shell: bash
|
||||
run: |
|
||||
python3 .github/windowdesc-ini-key.py
|
56
.gitignore
vendored
56
.gitignore
vendored
@@ -1,7 +1,55 @@
|
||||
/.vs
|
||||
/build*
|
||||
CMakeSettings.json
|
||||
bin/*
|
||||
!bin/ai
|
||||
bin/ai/*
|
||||
!bin/ai/compat*.nut
|
||||
!bin/ai/regression
|
||||
!bin/data
|
||||
bin/baseset/*
|
||||
!bin/baseset/openttd.grf
|
||||
!bin/baseset/opntitle.dat
|
||||
!bin/baseset/orig_extra.grf
|
||||
!bin/baseset/orig_*.obg
|
||||
!bin/baseset/orig_*.obs
|
||||
!bin/baseset/no_sound.obs
|
||||
!bin/baseset/no_music.obm
|
||||
!bin/baseset/orig_*.obm
|
||||
!bin/game
|
||||
bin/game/*
|
||||
!bin/game/compat*.nut
|
||||
!bin/scripts
|
||||
bin/scripts/*
|
||||
!bin/scripts/*.example
|
||||
!bin/scripts/readme.txt
|
||||
|
||||
*.aps
|
||||
bundle/*
|
||||
bundles/*
|
||||
docs/aidocs/*
|
||||
docs/gamedocs/*
|
||||
docs/source/*
|
||||
/out
|
||||
.kdev4
|
||||
.kdev4/*
|
||||
*.kdev4
|
||||
media/openttd.desktop
|
||||
media/openttd.desktop.install
|
||||
objs/*
|
||||
projects/.vs
|
||||
projects/Debug
|
||||
projects/Release
|
||||
projects/*.ncb
|
||||
projects/*.suo
|
||||
projects/*.sdf
|
||||
projects/*.opensdf
|
||||
projects/*.vcproj.*.user
|
||||
projects/*.vcxproj.user
|
||||
projects/*.VC.db
|
||||
projects/*.VC.opendb
|
||||
src/rev.cpp
|
||||
src/os/windows/ottdres.rc
|
||||
|
||||
/Makefile*
|
||||
!/Makefile.msvc
|
||||
/config.*
|
||||
!/config.lib
|
||||
!*.in
|
||||
*.tmp
|
||||
|
34
.hgignore
Normal file
34
.hgignore
Normal file
@@ -0,0 +1,34 @@
|
||||
syntax: glob
|
||||
|
||||
.svn
|
||||
*.aps
|
||||
bin/baseset/openttd.32.bmp
|
||||
bin/lang/*
|
||||
bin/openttd*
|
||||
bin/*.cfg
|
||||
bundle/*
|
||||
bundles/*
|
||||
config.cache*
|
||||
config.log
|
||||
config.pwd
|
||||
docs/aidocs/*
|
||||
docs/gamedocs/*
|
||||
docs/source/*
|
||||
.kdev4
|
||||
.kdev4/*
|
||||
*.kdev4
|
||||
Makefile
|
||||
Makefile.am
|
||||
Makefile.bundle
|
||||
media/openttd.desktop
|
||||
media/openttd.desktop.install
|
||||
objs/*
|
||||
projects/.vs
|
||||
projects/*.ncb
|
||||
projects/*.suo
|
||||
projects/*.sdf
|
||||
projects/*.opensdf
|
||||
projects/*.vcproj.*.user
|
||||
projects/*.vcxproj.user
|
||||
src/rev.cpp
|
||||
src/os/windows/ottdres.rc
|
450
CMakeLists.txt
450
CMakeLists.txt
@@ -1,450 +0,0 @@
|
||||
cmake_minimum_required(VERSION 3.16)
|
||||
|
||||
if(NOT BINARY_NAME)
|
||||
set(BINARY_NAME openttd)
|
||||
endif()
|
||||
|
||||
project(${BINARY_NAME}
|
||||
VERSION 14.0
|
||||
LANGUAGES CXX
|
||||
)
|
||||
|
||||
if(CMAKE_SOURCE_DIR STREQUAL CMAKE_BINARY_DIR)
|
||||
message(FATAL_ERROR "In-source builds not allowed. Please run \"cmake ..\" from the build directory. You may need to delete \"${CMAKE_SOURCE_DIR}/CMakeCache.txt\" first.")
|
||||
endif()
|
||||
|
||||
# Debug mode by default.
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE Debug)
|
||||
endif()
|
||||
|
||||
if (EMSCRIPTEN)
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/os/emscripten/cmake")
|
||||
endif()
|
||||
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake")
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.15)
|
||||
|
||||
# Use GNUInstallDirs to allow customisation
|
||||
# but set our own default data and bin dir
|
||||
if(NOT CMAKE_INSTALL_DATADIR)
|
||||
set(CMAKE_INSTALL_DATADIR "share/games")
|
||||
endif()
|
||||
if(NOT CMAKE_INSTALL_BINDIR)
|
||||
set(CMAKE_INSTALL_BINDIR "games")
|
||||
endif()
|
||||
include(GNUInstallDirs)
|
||||
|
||||
include(Options)
|
||||
set_options()
|
||||
set_directory_options()
|
||||
|
||||
include(Static)
|
||||
set_static_if_needed()
|
||||
|
||||
set(CMAKE_CXX_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED YES)
|
||||
set(CMAKE_CXX_EXTENSIONS NO)
|
||||
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS YES)
|
||||
|
||||
# An empty target for the tools
|
||||
add_custom_target(tools)
|
||||
|
||||
include(Endian)
|
||||
add_endian_definition()
|
||||
|
||||
include(CompileFlags)
|
||||
compile_flags()
|
||||
|
||||
if(APPLE OR UNIX)
|
||||
add_definitions(-DUNIX)
|
||||
endif()
|
||||
|
||||
if(UNIX)
|
||||
find_package(Doxygen)
|
||||
endif()
|
||||
|
||||
list(APPEND GENERATED_SOURCE_FILES "${CMAKE_BINARY_DIR}/generated/rev.cpp")
|
||||
if(WIN32)
|
||||
list(APPEND GENERATED_SOURCE_FILES "${CMAKE_BINARY_DIR}/generated/ottdres.rc")
|
||||
endif()
|
||||
|
||||
# Generate a target to determine version, which is execute every 'make' run
|
||||
add_custom_target(find_version
|
||||
${CMAKE_COMMAND}
|
||||
-DFIND_VERSION_BINARY_DIR=${CMAKE_BINARY_DIR}/generated
|
||||
-DCPACK_BINARY_DIR=${CMAKE_BINARY_DIR}
|
||||
-DREV_MAJOR=${PROJECT_VERSION_MAJOR}
|
||||
-DREV_MINOR=${PROJECT_VERSION_MINOR}
|
||||
-DWINDOWS=${WIN32}
|
||||
-P "${CMAKE_SOURCE_DIR}/cmake/scripts/FindVersion.cmake"
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
BYPRODUCTS ${GENERATED_SOURCE_FILES}
|
||||
)
|
||||
|
||||
# Documentation
|
||||
if(DOXYGEN_EXECUTABLE)
|
||||
add_custom_target(docs)
|
||||
add_custom_target(docs_source
|
||||
${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/docs
|
||||
COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_BINARY_DIR}/Doxyfile
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
COMMENT "Generating documentation for source"
|
||||
)
|
||||
add_dependencies(docs_source
|
||||
find_version
|
||||
)
|
||||
add_dependencies(docs
|
||||
docs_source
|
||||
)
|
||||
endif()
|
||||
|
||||
include(AddCustomXXXTimestamp)
|
||||
|
||||
if(OPTION_TOOLS_ONLY)
|
||||
if(HOST_BINARY_DIR)
|
||||
unset(HOST_BINARY_DIR CACHE)
|
||||
endif()
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/src)
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
# Avoid searching for headers in Frameworks, and libraries in LIBDIR.
|
||||
set(CMAKE_FIND_FRAMEWORK LAST)
|
||||
endif()
|
||||
|
||||
# Prefer -pthread over -lpthread, which is often the better option of the two.
|
||||
set(CMAKE_THREAD_PREFER_PTHREAD YES)
|
||||
# Make sure we have Threads available.
|
||||
find_package(Threads REQUIRED)
|
||||
# nlohmann is used for all our JSON needs.
|
||||
find_package(nlohmann_json REQUIRED)
|
||||
|
||||
find_package(ZLIB)
|
||||
find_package(LibLZMA)
|
||||
find_package(LZO)
|
||||
find_package(PNG)
|
||||
|
||||
if(WIN32 OR EMSCRIPTEN)
|
||||
# Windows uses WinHttp for HTTP requests.
|
||||
# Emscripten uses Javascript for HTTP requests.
|
||||
else()
|
||||
# All other targets use libcurl.
|
||||
find_package(CURL)
|
||||
endif()
|
||||
|
||||
# Breakpad doesn't support emscripten.
|
||||
if(NOT EMSCRIPTEN)
|
||||
find_package(unofficial-breakpad)
|
||||
endif()
|
||||
|
||||
if(NOT OPTION_DEDICATED)
|
||||
if(NOT WIN32)
|
||||
find_package(Allegro)
|
||||
if(NOT APPLE)
|
||||
find_package(Freetype)
|
||||
find_package(SDL2)
|
||||
if(NOT SDL2_FOUND)
|
||||
find_package(SDL)
|
||||
endif()
|
||||
find_package(Fluidsynth)
|
||||
if(Freetype_FOUND)
|
||||
find_package(Fontconfig)
|
||||
endif()
|
||||
find_package(Harfbuzz)
|
||||
find_package(ICU OPTIONAL_COMPONENTS i18n)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
if(APPLE)
|
||||
enable_language(OBJCXX)
|
||||
|
||||
find_package(Iconv)
|
||||
|
||||
find_library(AUDIOTOOLBOX_LIBRARY AudioToolbox)
|
||||
find_library(AUDIOUNIT_LIBRARY AudioUnit)
|
||||
find_library(COCOA_LIBRARY Cocoa)
|
||||
find_library(QUARTZCORE_LIBRARY QuartzCore)
|
||||
endif()
|
||||
|
||||
if(NOT EMSCRIPTEN AND NOT OPTION_DEDICATED)
|
||||
find_package(OpenGL COMPONENTS OpenGL)
|
||||
endif()
|
||||
|
||||
if(MSVC)
|
||||
find_package(Editbin REQUIRED)
|
||||
endif()
|
||||
|
||||
find_package(SSE)
|
||||
find_package(Xaudio2)
|
||||
|
||||
find_package(Grfcodec)
|
||||
|
||||
include(CheckIPOSupported)
|
||||
check_ipo_supported(RESULT IPO_FOUND)
|
||||
|
||||
show_options()
|
||||
|
||||
if(UNIX AND NOT APPLE AND NOT OPTION_DEDICATED)
|
||||
if(NOT SDL_FOUND AND NOT SDL2_FOUND AND NOT ALLEGRO_FOUND)
|
||||
message(FATAL_ERROR "SDL, SDL2 or Allegro is required for this platform")
|
||||
endif()
|
||||
if(HARFBUZZ_FOUND AND NOT ICU_i18n_FOUND)
|
||||
message(WARNING "HarfBuzz depends on ICU i18n to function; HarfBuzz will be disabled")
|
||||
endif()
|
||||
if(NOT HARFBUZZ_FOUND)
|
||||
message(WARNING "Without HarfBuzz and ICU i18n the game will not be able to render right-to-left languages correctly")
|
||||
endif()
|
||||
endif()
|
||||
if(APPLE)
|
||||
if(NOT AUDIOTOOLBOX_LIBRARY)
|
||||
message(FATAL_ERROR "AudioToolbox is required for this platform")
|
||||
endif()
|
||||
if(NOT AUDIOUNIT_LIBRARY)
|
||||
message(FATAL_ERROR "AudioUnit is required for this platform")
|
||||
endif()
|
||||
if(NOT COCOA_LIBRARY)
|
||||
message(FATAL_ERROR "Cocoa is required for this platform")
|
||||
endif()
|
||||
if(NOT QUARTZCORE_LIBRARY)
|
||||
message(FATAL_ERROR "QuartzCore is required for this platform")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(OPTION_PACKAGE_DEPENDENCIES)
|
||||
if(NOT UNIX)
|
||||
message(FATAL_ERROR "Can only package dependencies on Linux")
|
||||
endif()
|
||||
if(OPTION_INSTALL_FHS)
|
||||
message(FATAL_ERROR "Cannot install in FHS folders when we are packaging dependencies")
|
||||
endif()
|
||||
if(${CMAKE_VERSION} VERSION_LESS "3.16.0")
|
||||
message(FATAL_ERROR "OPTION_PACKAGE_DEPENDENCIES can only work with CMake 3.16+; you are using ${CMAKE_VERSION}")
|
||||
endif()
|
||||
|
||||
# If we are packaging dependencies, we do two things:
|
||||
# 1) set the RPATH to include $ORIGIN/lib; $ORIGIN (that literal string)
|
||||
# is a Linux indicator for "path where application is". In CMake, we
|
||||
# have to do this before add_executable() is executed.
|
||||
# 2) copy the libraries that we compile against to the "lib" folder.
|
||||
# This is done in InstallAndPackage.cmake.
|
||||
set(CMAKE_INSTALL_RPATH "\$ORIGIN/lib")
|
||||
set(CMAKE_BUILD_WITH_INSTALL_RPATH ON)
|
||||
endif()
|
||||
|
||||
include(CTest)
|
||||
include(SourceList)
|
||||
|
||||
# Needed by rev.cpp
|
||||
include_directories(${CMAKE_SOURCE_DIR}/src)
|
||||
# Needed by everything that uses Squirrel
|
||||
include_directories(${CMAKE_SOURCE_DIR}/src/3rdparty/squirrel/include)
|
||||
|
||||
include(MSVCFilters)
|
||||
|
||||
add_library(openttd_lib OBJECT ${GENERATED_SOURCE_FILES})
|
||||
add_executable(openttd WIN32)
|
||||
add_executable(openttd_test)
|
||||
set_target_properties(openttd PROPERTIES OUTPUT_NAME "${BINARY_NAME}")
|
||||
# All other files are added via target_sources()
|
||||
|
||||
if(MSVC)
|
||||
# Add DPI manifest to project; other WIN32 targets get this via ottdres.rc
|
||||
target_sources(openttd PRIVATE "${CMAKE_SOURCE_DIR}/os/windows/openttd.manifest")
|
||||
|
||||
# If target -static is used, switch our project to static (/MT) too.
|
||||
# If the target ends on -static-md, it will remain dynamic (/MD).
|
||||
if(VCPKG_TARGET_TRIPLET MATCHES "-static" AND NOT VCPKG_TARGET_TRIPLET MATCHES "-md")
|
||||
set_property(TARGET openttd_lib PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
|
||||
set_property(TARGET openttd PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
|
||||
set_property(TARGET openttd_test PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
target_precompile_headers(openttd_lib
|
||||
PRIVATE
|
||||
src/stdafx.h
|
||||
src/core/format.hpp
|
||||
)
|
||||
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/bin)
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/src)
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/media)
|
||||
|
||||
add_dependencies(openttd
|
||||
find_version)
|
||||
|
||||
target_link_libraries(openttd_lib
|
||||
openttd::languages
|
||||
openttd::settings
|
||||
openttd::script_api
|
||||
Threads::Threads
|
||||
)
|
||||
|
||||
target_link_libraries(openttd
|
||||
openttd_lib
|
||||
openttd::media
|
||||
openttd::basesets
|
||||
)
|
||||
|
||||
target_link_libraries(openttd_test PRIVATE openttd_lib)
|
||||
include(Catch)
|
||||
catch_discover_tests(openttd_test)
|
||||
|
||||
if(HAIKU)
|
||||
target_link_libraries(openttd_lib "be" "network" "midi")
|
||||
endif()
|
||||
|
||||
if(IPO_FOUND)
|
||||
set_target_properties(openttd PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELEASE True)
|
||||
set_target_properties(openttd PROPERTIES INTERPROCEDURAL_OPTIMIZATION_MINSIZEREL True)
|
||||
set_target_properties(openttd PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELWITHDEBINFO True)
|
||||
endif()
|
||||
set_target_properties(openttd PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
|
||||
process_compile_flags()
|
||||
|
||||
include(LinkPackage)
|
||||
link_package(PNG TARGET PNG::PNG ENCOURAGED)
|
||||
link_package(ZLIB TARGET ZLIB::ZLIB ENCOURAGED)
|
||||
link_package(LIBLZMA TARGET LibLZMA::LibLZMA ENCOURAGED)
|
||||
link_package(LZO)
|
||||
link_package(nlohmann_json)
|
||||
|
||||
if(NOT WIN32 AND NOT EMSCRIPTEN)
|
||||
link_package(CURL ENCOURAGED)
|
||||
endif()
|
||||
|
||||
if(NOT EMSCRIPTEN)
|
||||
link_package(unofficial-breakpad TARGET unofficial::breakpad::libbreakpad_client ENCOURAGED)
|
||||
endif()
|
||||
|
||||
if(NOT OPTION_DEDICATED)
|
||||
link_package(Fluidsynth)
|
||||
link_package(SDL)
|
||||
link_package(SDL2 TARGET SDL2::SDL2)
|
||||
link_package(Allegro)
|
||||
link_package(FREETYPE TARGET Freetype::Freetype)
|
||||
link_package(Fontconfig TARGET Fontconfig::Fontconfig)
|
||||
link_package(Harfbuzz TARGET harfbuzz::harfbuzz)
|
||||
link_package(ICU_i18n)
|
||||
|
||||
if(SDL2_FOUND AND OPENGL_FOUND AND UNIX)
|
||||
# SDL2 dynamically loads OpenGL if needed, so do not link to OpenGL when
|
||||
# on Linux. For Windows, we need to link to OpenGL as we also have a win32
|
||||
# driver using it.
|
||||
add_definitions(-DWITH_OPENGL)
|
||||
message(STATUS "OpenGL found -- -DWITH_OPENGL -- (via SDL2)")
|
||||
else()
|
||||
link_package(OpenGL TARGET OpenGL::GL)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
include(CheckAtomic)
|
||||
|
||||
if(APPLE)
|
||||
link_package(Iconv TARGET Iconv::Iconv)
|
||||
|
||||
target_link_libraries(openttd_lib
|
||||
${AUDIOTOOLBOX_LIBRARY}
|
||||
${AUDIOUNIT_LIBRARY}
|
||||
${COCOA_LIBRARY}
|
||||
${QUARTZCORE_LIBRARY}
|
||||
)
|
||||
|
||||
add_definitions(
|
||||
-DWITH_COCOA
|
||||
)
|
||||
endif()
|
||||
|
||||
if(EMSCRIPTEN)
|
||||
add_library(WASM::WASM INTERFACE IMPORTED)
|
||||
|
||||
# Allow heap-growth, and start with a bigger memory size.
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s ALLOW_MEMORY_GROWTH=1")
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s INITIAL_MEMORY=33554432")
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s DISABLE_EXCEPTION_CATCHING=0")
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s WASM_BIGINT")
|
||||
add_definitions(-s DISABLE_EXCEPTION_CATCHING=0)
|
||||
|
||||
# Export functions to Javascript.
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s EXPORTED_FUNCTIONS='[\"_main\", \"_em_openttd_add_server\"]' -s EXPORTED_RUNTIME_METHODS='[\"cwrap\"]'")
|
||||
|
||||
# Preload all the files we generate during build.
|
||||
# As we do not compile with FreeType / FontConfig, we also have no way to
|
||||
# render several languages (like Chinese, ..), so where do you draw the
|
||||
# line what languages to include and which not? In the end, especially as
|
||||
# the more languages you add the slower downloading becomes, we decided to
|
||||
# only ship the English language.
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_BINARY_DIR}/baseset@/baseset")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_BINARY_DIR}/lang/english.lng@/lang/english.lng")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_SOURCE_DIR}/bin/ai@/ai")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_SOURCE_DIR}/bin/game@/game")
|
||||
|
||||
# We use IDBFS for persistent storage.
|
||||
target_link_libraries(WASM::WASM INTERFACE "-lidbfs.js")
|
||||
|
||||
# Use custom pre-js and shell.html.
|
||||
target_link_libraries(WASM::WASM INTERFACE "--pre-js ${CMAKE_SOURCE_DIR}/os/emscripten/pre.js")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--shell-file ${CMAKE_SOURCE_DIR}/os/emscripten/shell.html")
|
||||
|
||||
# Build the .html (which builds the .js, .wasm, and .data too).
|
||||
set_target_properties(openttd PROPERTIES SUFFIX ".html")
|
||||
target_link_libraries(openttd WASM::WASM)
|
||||
endif()
|
||||
|
||||
if(NOT PERSONAL_DIR STREQUAL "(not set)")
|
||||
add_definitions(
|
||||
-DWITH_PERSONAL_DIR
|
||||
-DPERSONAL_DIR="${PERSONAL_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT SHARED_DIR STREQUAL "(not set)")
|
||||
add_definitions(
|
||||
-DWITH_SHARED_DIR
|
||||
-DSHARED_DIR="${SHARED_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT GLOBAL_DIR STREQUAL "(not set)")
|
||||
add_definitions(
|
||||
-DGLOBAL_DATA_DIR="${GLOBAL_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
link_package(SSE)
|
||||
|
||||
add_definitions_based_on_options()
|
||||
|
||||
if(WIN32)
|
||||
add_definitions(
|
||||
-DUNICODE
|
||||
-D_UNICODE
|
||||
-DWITH_UNISCRIBE
|
||||
-DPSAPI_VERSION=1
|
||||
)
|
||||
|
||||
target_link_libraries(openttd_lib
|
||||
ws2_32
|
||||
winmm
|
||||
imm32
|
||||
usp10
|
||||
psapi
|
||||
winhttp
|
||||
)
|
||||
endif()
|
||||
|
||||
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
add_definitions(-DPOINTER_IS_64BIT)
|
||||
endif()
|
||||
|
||||
include(CreateRegression)
|
||||
create_regression()
|
||||
|
||||
if(APPLE OR WIN32)
|
||||
find_package(Pandoc)
|
||||
endif()
|
||||
|
||||
include(InstallAndPackage)
|
488
CODINGSTYLE.md
488
CODINGSTYLE.md
@@ -1,488 +0,0 @@
|
||||
**Why a set coding style is important** <br>
|
||||
This project is an open source project. To get open source working as intended, many people should be able to comprehend the code. This implies that there is a well documented and uniform flow of code.
|
||||
That does not necessarily mean that a contributor should not write optimised yet cryptic code - one should always care about performance. However, other developers need to understand the code which means complicated parts of code **must** have good documentation.
|
||||
|
||||
**Why we require that EVERYTHING is documented**<br>
|
||||
What is simple to some might appear very complicated to others. Documentation helps these others. Anyone should be able to improve the code. But the main reason is, that when patch contributors want their patches added to the common codebase, the code needs to be reviewed by one or more developers. Documentation makes reviewing much easier.
|
||||
|
||||
## Coding style for OpenTTD
|
||||
### Functions
|
||||
* Function names use [CamelCase](http://www.wikipedia.org/wiki/Camelcase) without underscores.
|
||||
* Opening curly bracket **{** for a function starts on the next line.
|
||||
* Use Foo() instead of Foo(void).
|
||||
```c++
|
||||
void ThisIsAFunction()
|
||||
{
|
||||
}
|
||||
```
|
||||
|
||||
### Variables
|
||||
* Variable names are all lowercase, and use "_" as separator.
|
||||
* Global variables are preceded by an underscore. ("_") Use descriptive names because leading underscores are often used for system / compiler variables.
|
||||
* Own members of classes should always be referenced using "this->"
|
||||
* Pointers and references should have their reference symbol next to the name (compatibility with current code).
|
||||
* Variables that are declared below one another should have their type, name or reference operator, and assignment operator aligned by spaces.
|
||||
* There are set names for many variables. Those are (but not limited to): Vehicle *u, *v, *w; Station *st; Town *t; Window *w; Engine *e.
|
||||
* For multiple instances, use numbers "*t1, *t2" or postfixes "*st_from, *st_to".
|
||||
* Declare variables upon first usage.
|
||||
* Declare iterators in their loop.
|
||||
* There is a space between '*' and 'const' in "const pointers"
|
||||
```c++
|
||||
int number = 10;
|
||||
Vehicle *u_first_wagon = v->next;
|
||||
int value = v->value;
|
||||
|
||||
uint32 _global_variable = 3750;
|
||||
|
||||
static const char * const _global_array[] = {
|
||||
"first string",
|
||||
"second string",
|
||||
"another string",
|
||||
"last string followed by comma",
|
||||
};
|
||||
|
||||
protected:
|
||||
char protected_array[10];
|
||||
|
||||
for (int i = 0;;);
|
||||
```
|
||||
|
||||
* Give the variables expedient names, this increases the readability of the code
|
||||
```c++
|
||||
bool is_maglev_train = true;
|
||||
if (!is_maglev_train) DoSomething();
|
||||
```
|
||||
|
||||
* Some people like to introduce copies of variables to increase readability, which can waste memory. However, in some cases, especially in code pieces which are often called, it makes sense to cache some calculated variables.
|
||||
```c++
|
||||
/* Unoptimized code:
|
||||
* foo is not touched inside the loop!
|
||||
*/
|
||||
for (uint8 i = 0; i < 100000; i++) {
|
||||
/* Do something */
|
||||
if (value_to_check == (foo * 4) % 5 + 6) DoSomething();
|
||||
/* Do something else */
|
||||
}
|
||||
|
||||
/* Better:
|
||||
* The used value of foo is calculated outside the loop.
|
||||
*/
|
||||
const uint32 bar = (foo * 4) % 5 + 6;
|
||||
for (uint8 i = 0; i < 100000; i++) {
|
||||
/* Do something */
|
||||
if (value_to_check == bar) DoSomething();
|
||||
/* Do something else */
|
||||
}
|
||||
```
|
||||
|
||||
### Enumerations / static consts
|
||||
* Enumerations store integers that belong logically together (railtypes, string IDs, etc.).
|
||||
* Enumeration names also use CamelCase.
|
||||
* Unscoped enumerators are all caps with "_" between the words.
|
||||
* Scoped enumerators are use CamelCase.
|
||||
* Enums are not used to store single numbers.
|
||||
* Enums have consecutive numbers OR
|
||||
* Enums have consecutive powers of two. Powers of two (bits) are written in hex or with the shift operator.
|
||||
* Enums may have special enumerators: "_BEGIN", "\_END", and "INVALID\_"). See example.
|
||||
* The invalid always has 0xFF, 0xFFFF, 0xFFFFFFFF as a value.
|
||||
* Other special values are consecutively less than the invalid.
|
||||
* Variables that hold enumerators should have the type of the enumeration.
|
||||
```c++
|
||||
enum DiagDirection {
|
||||
DIAGDIR_BEGIN = 0,
|
||||
DIAGDIR_NE = 0,
|
||||
DIAGDIR_SE = 1,
|
||||
DIAGDIR_SW = 2,
|
||||
DIAGDIR_NW = 3,
|
||||
DIAGDIR_END,
|
||||
INVALID_DIAGDIR = 0xFF,
|
||||
BROKEN_DIAGDIR = 0xFE,
|
||||
};
|
||||
|
||||
enum {
|
||||
DEPOT_SERVICE = (1 << 0),
|
||||
DEPOT_MASS_SEND = (1 << 1),
|
||||
DEPOT_DONT_CANCEL = (1 << 2),
|
||||
DEPOT_LOCATE_HANGAR = (1 << 3),
|
||||
};
|
||||
|
||||
DiagDirection enterdir = DIAGDIR_NE;
|
||||
```
|
||||
|
||||
* Numbers that store single or uncorrelated data are static consts. Those may use the naming conventions of enums.
|
||||
Example:
|
||||
```c++
|
||||
static const int MAXIMUM_STATIONS = 42;
|
||||
```
|
||||
|
||||
* Enums are useful in GUI code: When widgets are enumerated, they are easier to access during many operations. Additionally changes caused by modified widget sequences require less code adapting. If a widget is used like this, its enum name should be present in a comment behind the corresponding widget definition.
|
||||
```c++
|
||||
/** Enum referring to the widgets of the build rail depot window */
|
||||
enum BuildRailDepotWidgets {
|
||||
BRDW_CLOSEBOX = 0,
|
||||
BRDW_CAPTION,
|
||||
BRDW_BACKGROUND,
|
||||
BRDW_DEPOT_NE,
|
||||
BRDW_DEPOT_SE,
|
||||
BRDW_DEPOT_SW,
|
||||
BRDW_DEPOT_NW,
|
||||
};
|
||||
/* ... */
|
||||
/** Widget definition of the build rail depot window */
|
||||
static const Widget _build_depot_widgets[] = {
|
||||
{ WWT_CLOSEBOX, RESIZE_NONE, 7, 0, 10, 0, 13, STR_00C5, STR_..}, // BRDW_CLOSEBOX
|
||||
{ WWT_CAPTION, RESIZE_NONE, 7, 11, 139, 0, 13, STR_..., STR_...}, // BRDW_CAPTION
|
||||
{ WWT_PANEL, RESIZE_NONE, 7, 0, 139, 14, 121, 0x0, STR_NULL}, // BRDW_BACKGROUND
|
||||
{ WWT_PANEL, RESIZE_NONE, 14, 71, 136, 17, 66, 0x0, STR_..}, // BRDW_DEPOT_NE
|
||||
{ WWT_PANEL, RESIZE_NONE, 14, 71, 136, 69, 118, 0x0, STR_..}, // BRDW_DEPOT_SE
|
||||
{ WWT_PANEL, RESIZE_NONE, 14, 3, 68, 69, 118, 0x0, STR_..}, // BRDW_DEPOT_SW
|
||||
{ WWT_PANEL, RESIZE_NONE, 14, 3, 68, 17, 66, 0x0, STR_..}, // BRDW_DEPOT_NW
|
||||
{ WIDGETS_END},
|
||||
};
|
||||
```
|
||||
|
||||
* Comments on the enum values should start with "///<" to enable doxygen documentation
|
||||
```c++
|
||||
enum SomeEnumeration {
|
||||
SE_BEGIN = 0, ///< Begin of the enumeration, used for iterations
|
||||
SE_FOO = 0, ///< Used for xy
|
||||
SE_BAR, ///< Another value of the enumeration
|
||||
SE_SUB, ///< Special case for z
|
||||
SE_END, ///< End of the enumeration, used for iterations
|
||||
};
|
||||
```
|
||||
|
||||
### Control flow
|
||||
* Put a space before the parentheses in **if**, **switch**, **for** and **while** statements.
|
||||
* Use curly braces and put the contained statements on their own lines (e.g., don't put them directly after the **if**).
|
||||
* Opening curly bracket **{** stays on the first line, closing curly bracket **}** gets a line to itself (except for the **}** preceding an **else**, which should be on the same line as the **else**).
|
||||
* When only a single statement is contained, the brackets can be omitted. In this case, put the single statement on the same line as the preceding keyword (**if**, **while**, etc.). Note that this is only allowed for if statements without an **else** clause.
|
||||
* All fall throughs must be documented, using a **FALLTHROUGH** define/macro.
|
||||
* The NOT_REACHED() macro can be used in default constructs that should never be reached.
|
||||
* Unconditional loops are written with **`for (;;) {`**
|
||||
|
||||
```c++
|
||||
if (a == b) {
|
||||
Foo();
|
||||
} else {
|
||||
Bar();
|
||||
}
|
||||
|
||||
if (very_large_checks &&
|
||||
spread_over_two_lines) {
|
||||
Foo();
|
||||
}
|
||||
|
||||
if (a == b) Foo();
|
||||
|
||||
switch (a) {
|
||||
case 0: DoSomethingShort(); break;
|
||||
|
||||
case 1:
|
||||
DoSomething();
|
||||
FALLTHROUGH;
|
||||
|
||||
case 2:
|
||||
DoMore();
|
||||
b = a;
|
||||
break;
|
||||
|
||||
case 3: {
|
||||
int r = 2;
|
||||
|
||||
DoEvenMore(a);
|
||||
FALLTHROUGH;
|
||||
}
|
||||
|
||||
case 4: {
|
||||
int q = 345;
|
||||
|
||||
DoIt();
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
NOT_REACHED();
|
||||
}
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
Foo();
|
||||
Bar();
|
||||
}
|
||||
```
|
||||
|
||||
### Classes
|
||||
* Classes names also use CamelCase.
|
||||
* Classes should have "public", "protected", and "private" sections.
|
||||
* Within these section the order is: types, static const members, static members, members, constructors / destructors, static methods, methods.
|
||||
* Deviations from above order are allowed when the code dictates it (e.g. a static const is needed for a typedef)
|
||||
* Methods and members ought to be grouped logically.
|
||||
* All those sorting rules sometimes conflict which one another. Please use common sense what increases legibility of the code in such a case.
|
||||
* The method implementation should indicate if it is virtual or similar by using a comment.
|
||||
* Very short methods can have one-line definition (if defined in the class scope).
|
||||
```c++
|
||||
class ThisIsAClass {
|
||||
public:
|
||||
typedef Titem_ *ItemPtr;
|
||||
private:
|
||||
static const int MAX_SIZE = 500;
|
||||
int size;
|
||||
ItemPtr *items;
|
||||
|
||||
public:
|
||||
explicit ThisIsAClass();
|
||||
~ThisIsAClass();
|
||||
|
||||
int GetSize() { return this->size; }
|
||||
virtual void Method();
|
||||
};
|
||||
|
||||
/*virtual*/ void Class::Method()
|
||||
{
|
||||
this->size *= 2;
|
||||
}
|
||||
```
|
||||
|
||||
### Templates
|
||||
Templates are a very powerful C++ tool, but they can easily confuse beginners. Thus:
|
||||
* Templates are to be documented in a very clear and verbose manner. Never assume anything in the documentation.
|
||||
* the template keyword and the template layout get a separate line. typenames are either "T" or preceded by a "T", integers get a single capital letter or a descriptive name preceded by "T".
|
||||
```c++
|
||||
template <typename T, typename Tsomething, int N, byte Tnumber_of_something>
|
||||
int Func();
|
||||
```
|
||||
|
||||
* If you are writing one or more template class in the dedicated header file, use file.hpp for its name instead of file.h. This will let others know that it is template library (includes also implementation), not just header with declarations.
|
||||
|
||||
### Other important rules
|
||||
* Put a space before and after binary operators: "a + b", "a == b", "a & b", "a <<= b", etc.. Exceptions are ".", "->" and "[]" (no spaces) and "," (just space after it).
|
||||
* Put parenthesis where it improves readability: "*(b++)" instead of "*b++", and "if ((a & b) && c == 2)" instead of "if (a & b && c == 2)".
|
||||
* Do not put external declarations in implementation (i.e. cpp) files.
|
||||
* Use const where possible.
|
||||
* Do not typedef enums and structs.
|
||||
* Don't treat non-flags as flags: use "if (char_pointer != nullptr && *char_pointer != '\0')", not "if (char_pointer && *char_pointer)".
|
||||
* Use "free(p)" instead of "if (p != nullptr) free(p)". "free(nullptr)" doesn't hurt anyone.
|
||||
* No trailing whitespace. The Github workflows will not allow tabs or space on the end of lines.
|
||||
* Only use tabs to indent from the start of the line.
|
||||
* Line length is unlimited. In practice it may be useful to split a long line. When splitting, add two tabs in front of the second part.
|
||||
* The '#' of preprocessor instructions goes into the first column of a line. Indenting is done after the '#' (using tabs again).
|
||||
* Use /* */ for single line comments.
|
||||
* Use // at the end of a command line to indicate comments.
|
||||
** However, use /* */ after # preprocessor statements as // causes warnings in some compilers and/or might have unwanted side effects.
|
||||
* C++ is defined using the ASCII character set. Do not use other character sets, not even in comments.
|
||||
* OpenTTD includes some Objective-C sources (*.mm, used by OSX), which has a special object method invocation syntax: "[ obj doStuff:foo ]". Please use spaces on the inside of the brackets to differentiate from the C array syntax.
|
||||
|
||||
## Documentation
|
||||
We use [Doxygen](http://doxygen.org/) to automatically generate documentation from the source code. It scans the source files for *recognizable* comments.
|
||||
* **Make your comments recognizable.**
|
||||
Doxygen only comments starting with the following style:
|
||||
```c++
|
||||
/**
|
||||
///<
|
||||
```
|
||||
|
||||
Use /** for multi-line comment blocks. Use ///< for single line comments for variables. Use //!< for single-line comments in the NoAI .hpp files.
|
||||
For comments blocks inside a function always use /* */ or //. Use // only if the comment is on the same line as an instruction, otherwise use /* */.
|
||||
|
||||
### Files
|
||||
* Put a @file command in a JavaDoc style comment at the start of the file, followed by a description.
|
||||
```c++
|
||||
/**
|
||||
* @file
|
||||
* This is the brief description.
|
||||
* This is the detailed description here and on the following lines.
|
||||
*/
|
||||
```
|
||||
> ### Warning
|
||||
> If a file lacks a **file comment block** then NO entities in that file will be documented by Doxygen!
|
||||
|
||||
### Functions
|
||||
* The documentation should be as close to the actual code as possible to maximise the chance of staying in sync.
|
||||
* Comments for functions go in the .cpp file.
|
||||
* Comments for inlines go in the .h/.hpp file.
|
||||
* Small inlines can have a short 3 or 4 line JavaDoc style comment.
|
||||
* Completely document larger functions.
|
||||
* Document obvious parameters and return values too!
|
||||
|
||||
```c++
|
||||
/**
|
||||
* A brief explanation of what the function does and/or what purpose it serves.
|
||||
* Then follows a more detailed explanation of the function that can span multiple lines.
|
||||
*
|
||||
* @param foo Explanation of the foo parameter
|
||||
* @param bar Explanation of the bar parameter
|
||||
* @return The sum of foo and bar (@return can be omitted if the return type is void)
|
||||
*
|
||||
* @see SomeOtherFunc()
|
||||
* @see SOME_ENUM
|
||||
*
|
||||
* @bug Some bug description
|
||||
* @bug Another bug description which continues in the next line
|
||||
* and ends with the following blank line
|
||||
*
|
||||
* @todo Some to-do entry
|
||||
*/
|
||||
static int FooBar(int foo, int bar)
|
||||
{
|
||||
return foo + bar;
|
||||
}
|
||||
```
|
||||
|
||||
### Classes
|
||||
* Document structs similarly to functions:
|
||||
```c++
|
||||
/**
|
||||
* A short description of the struct.
|
||||
* More detailed description of the its usage.
|
||||
*
|
||||
* @see [link to anything of interest]
|
||||
*/
|
||||
struct foo {
|
||||
}
|
||||
```
|
||||
|
||||
### JavaDoc structural commands
|
||||
|
||||
This table shows the commands you should use with OpenTTD. The full list is [here](http://www.stack.nl/~dimitri/doxygen/commands.html).
|
||||
|
||||
| Command | Action | Example |
|
||||
| ------- | -------- | ------- |
|
||||
| **@attention** | Starts a paragraph where a message that needs attention may be entered. The paragraph will be indented. | @attention Whales crossing! |
|
||||
| **@brief** | Starts a paragraph that serves as a brief description. For classes and files the brief description will be used in lists and at the start of the documentation page. For class and file members, the brief description will be placed at the declaration of the member and prepended to the detailed description. A brief description may span several lines (although it is advised to keep it brief!). | @brief This is the brief description. |
|
||||
| **@bug** | Starts a paragraph where one or more bugs may be reported. The paragraph will be indented. Multiple adjacent @bug commands will be joined into a single paragraph. Each bug description will start on a new line. Alternatively, one @bug command may mention several bugs. | @bug Memory leak in here? |
|
||||
| **@note** | Starts a paragraph where a note can be entered. The paragraph will be indented. | @note Might be slow |
|
||||
| **@todo** | Starts a paragraph where a TODO item is described. The description will also add an item to a separate TODO list. The two instances of the description will be cross-referenced. Each item in the TODO list will be preceded by a header that indicates the origin of the item. | @todo Better error checking |
|
||||
| **@warning** | Starts a paragraph where one or more warning messages may be entered. The paragraph will be indented. | @warning Not thread safe! |
|
||||
| | <small>**Function related commands**</small> | |
|
||||
| **@return** | Starts a return value description for a function. | @return a character pointer |
|
||||
| **@param** | Starts a parameter description for a function parameter with name <parameter-name>. Followed by a description of the parameter. The existence of the parameter is checked and a warning is given if the documentation of this (or any other) parameter is missing or not present in the function declaration or definition.<br><br>The @param command has an optional attribute specifying the direction of the attribute. Possible values are "in" and "out". | @param n The number of bytes to copy<br>@param[out] dest The memory area to copy to.<br>@param[in] src The memory area to copy from. |
|
||||
| **@see** | Starts a paragraph where one or more cross-references to classes, functions, methods, variables, files or URL may be specified. Two names joined by either :: or # are understood as referring to a class and one of its members. One of several overloaded methods or constructors may be selected by including a parenthesized list of argument types after the method name. [Here](http://www.stack.nl/~dimitri/doxygen/autolink.html) you can find detailed information about this feature. | @see OtherFunc() |
|
||||
| **@b** | Displays the following word using a bold font. Equivalent to <b>word</b>. To put multiple words in bold use <b>multiple words</b>.| ...@b this and @b that... |
|
||||
| **@c / @p** | Displays the parameter <word> using a typewriter font. You can use this command to refer to member function parameters in the running text. To have multiple words in typewriter font use <tt>multiple words</tt>. | ... the @p x and @p y coordinates are used to... |
|
||||
| **@arg / @li** | This command has one argument that continues until the first blank line or until another @arg / @li is encountered. The command can be used to generate a simple, not nested list of arguments. Each argument should start with an @arg / @li command. | @arg @c AlignLeft left alignment.<br>@arg @c AlignCenter center alignment.<br>@arg @c AlignRight right alignment |
|
||||
| **@n** | Forces a new line. Equivalent to and inspired by the printf function. |@n |
|
||||
|
||||
### More on Doxygen and JavaDoc
|
||||
|
||||
Doxygen knows two different kinds of comments:
|
||||
* *Brief descriptions*: one-liners that describe the function roughly ([example](http://docs.openttd.org/annotated.html))
|
||||
* *Detailed descriptions*: as the name suggests, these contain the detailed function/purpose of the entity they describe ([example](http://docs.openttd.org/structBridge.html))
|
||||
|
||||
You can omit either one or put them into different places but there's only one brief and one detailed description allowed for the same entity.
|
||||
|
||||
Doxygen knows three modes for documenting an entity:
|
||||
* Before the entity
|
||||
* After the entity
|
||||
* In a different file
|
||||
|
||||
The latter is a little harder to maintain since the prototype of the entity it describes then is stored in several places (e.g. the .h file and the file with the descriptions). Also while it makes the code easier to read it also makes it easier to omit the important step of updating the description of an entity if it was changed - and we all know why that shouldn't happen ;)<br>
|
||||
Because of those reasons, we will only use the first two documentation schemes.
|
||||
|
||||
|
||||
Doxygen supports both Qt and JavaDoc comment styles:
|
||||
* Qt style example: **int i; //!< The counter for the main loop**
|
||||
* JavaDoc style example: **int i; /*\*< The counter for the main loop \*/**
|
||||
|
||||
It also supports more comment styles but those two are the ones which are standardized. For OTTD we'll be using the JavaDoc style. One of the reasons is that it has a feature that the Qt style doesn't offer: JavaDoc style comment blocks will automatically start a brief description which ends at the first dot followed by a space or new line. Everything after that will also be part of the detailed description.
|
||||
|
||||
The general structure of a JavaDoc style comment is
|
||||
```c++
|
||||
/**
|
||||
* This is the brief description. And this sentence contains some further explanations that will appear in the detailed description only.
|
||||
*/
|
||||
```
|
||||
|
||||
and the resulting descriptions of that block would be:
|
||||
* *Brief description*: This is the brief description.
|
||||
* *Detailed description*: This is the brief description. And this sentence contains some further explanations that will appear in the detailed description only.
|
||||
|
||||
The distinction between the brief and detailed descriptions is made by the dot followed by a space/newline, so if you want to use that inside the brief description you need to escape the space/newline:
|
||||
```c++
|
||||
/**
|
||||
* This is a brief description (e.g.\ using only a few words). Details go here.
|
||||
*/
|
||||
```
|
||||
|
||||
If you're doing a one-line comment, use:
|
||||
```c++
|
||||
int i; ///< This is the description.
|
||||
```
|
||||
|
||||
Also in the comment block you can include so-called structural commands which tell Doxygen what follows. In general, their area of effect begins after the command word and ends when a blank line or some other command is encountered. Also, multiple occurrences of the same structural command within a comment block or the referring entity will be joined in the documentation output usually.
|
||||
|
||||
## Commit message
|
||||
To achieve a coherent whole and to make changelog writing easier, here are some guidelines for commit messages.
|
||||
There is a check-script on the git server (also available for clients, see below) to make sure we all follow those rules.
|
||||
|
||||
The first line of a message must match:
|
||||
```
|
||||
<keyword>( #<issue>| <commit>(, (<keyword> #<issue>|<commit>))*)?: ([<section])? <Details>
|
||||
```
|
||||
Keywords are:
|
||||
* Add, Feature: Adding new stuff. Difference between "Feature" and "Add" is somewhat subjective. "Feature" for user-point-of-view stuff, "Add" for other.
|
||||
* Change: Changing behaviour from user-point-of-view.
|
||||
* Remove: Removing something from user-point-of-view.
|
||||
* Codechange, Cleanup: Changes without intentional change of behaviour from user-point-of-view. Difference between "Codechange" and "Cleanup" is somewhat subjective.
|
||||
* Fix, Revert: Fixing stuff.
|
||||
* Doc, Update: Documentation changes, version increments, translator commits.
|
||||
* Prepare: Preparation for bigger changes. Rarely used.
|
||||
|
||||
If you commit a fix for an [issue](https://github.com/OpenTTD/OpenTTD/issues), add the corresponding issue number in the form of #NNNN. Do it as well if you implement a feature with a matching entry.
|
||||
|
||||
In the case of bugfixes, if you know what revision the bug was introduced (eg regression), please mention that revision as well just after the prefix. Finding the trouble-causing revision is highly encouraged as it makes backporting/branching/releases that much easier.
|
||||
|
||||
To further structure the changelog, you can add sections. Example are:
|
||||
* "Network" for network specific changes
|
||||
* "NewGRF" for NewGRF additions
|
||||
* "YAPP", "NPF", for changes in these features
|
||||
* "OSX", "Debian", "win32", for OS-specific packaging changes
|
||||
|
||||
Further explanations, general bitching, etc. don't go into the first line. Use a new line for those.
|
||||
|
||||
Complete examples:
|
||||
* Fix: [YAPF] Infinite loop in pathfinder.
|
||||
* Fix #5926: [YAPF] Infinite loop in pathfinder.
|
||||
* Fix 80dffae130: Warning about unsigned unary minus.
|
||||
* Fix #6673, 99bb3a95b4: Store the map variety setting in the samegame.
|
||||
* Revert d9065fbfbe, Fix #5922: ClientSizeChanged is only called via WndProcGdi which already has the mutex.
|
||||
* Fix #1264, Fix #2037, Fix #2038, Fix #2110: Rewrite the autoreplace kernel.
|
||||
|
||||
|
||||
## Other tips
|
||||
### Remove trailing whitespace
|
||||
To find out if/where you have trailing whitespace, you can use the following (unix/bash) command:
|
||||
```
|
||||
grep -n -R --include "*.[ch]" '[ ]$' * | grep --invert-match ".diff" | grep --invert-match ".patch"
|
||||
```
|
||||
Automatically removing whitespace is also possible with the following shell script (Note that it only checks .c, .cpp, .h, .hpp and .mm files):
|
||||
```
|
||||
#!/bin/sh
|
||||
IFS='
|
||||
'
|
||||
for i in Makefile `find . -name \*.c -o -name \*.cpp -o -name \*.h -o -name \*.hpp -o -name \*.mm`
|
||||
do
|
||||
(
|
||||
echo '%s/[ ]\{1,\}$/'
|
||||
echo w
|
||||
echo q
|
||||
) | ed $i 2> /dev/null > /dev/null
|
||||
done
|
||||
```
|
||||
|
||||
### Install the client-side git commit hooks
|
||||
|
||||
The client-side hooks perform various checks when you commit changes locally.
|
||||
* Whitespace and indentation checks.
|
||||
* **Coding style** checks.
|
||||
|
||||
Get the hooks:
|
||||
```
|
||||
git clone https://github.com/OpenTTD/OpenTTD-git-hooks.git openttd_hooks
|
||||
```
|
||||
|
||||
Install the hooks, assuming "openttd" is your work tree folder:
|
||||
```
|
||||
cd openttd/.git/hooks
|
||||
ln -s -t . ../../../openttd_hooks/hooks/*
|
||||
```
|
||||
|
173
COMPILING.md
173
COMPILING.md
@@ -2,35 +2,28 @@
|
||||
|
||||
## Required/optional libraries
|
||||
|
||||
OpenTTD makes use of the following external libraries:
|
||||
The following libraries are used by OpenTTD for:
|
||||
|
||||
- (required) nlohmann-json: JSON handling
|
||||
- (encouraged) breakpad: creates minidumps on crash
|
||||
- (encouraged) zlib: (de)compressing of old (0.3.0-1.0.5) savegames, content downloads,
|
||||
- zlib: (de)compressing of old (0.3.0-1.0.5) savegames, content downloads,
|
||||
heightmaps
|
||||
- (encouraged) liblzma: (de)compressing of savegames (1.1.0 and later)
|
||||
- (encouraged) libpng: making screenshots and loading heightmaps
|
||||
- (optional) liblzo2: (de)compressing of old (pre 0.3.0) savegames
|
||||
|
||||
For Linux, the following additional libraries are used:
|
||||
|
||||
- (encouraged) libcurl: content downloads
|
||||
- libSDL2: hardware access (video, sound, mouse)
|
||||
- liblzo2: (de)compressing of old (pre 0.3.0) savegames
|
||||
- liblzma: (de)compressing of savegames (1.1.0 and later)
|
||||
- libpng: making screenshots and loading heightmaps
|
||||
- libfreetype: loading generic fonts and rendering them
|
||||
- libfontconfig: searching for fonts, resolving font names to actual fonts
|
||||
- harfbuzz: handling of right-to-left scripts (e.g. Arabic and Persian) (required libicu)
|
||||
- libicu: handling of right-to-left scripts (e.g. Arabic and Persian) and
|
||||
natural sorting of strings
|
||||
|
||||
If you are building a dedicated-server only, you don't need the last four.
|
||||
natural sorting of strings (Linux only)
|
||||
- libSDL2: hardware access (video, sound, mouse) (not required for Windows or macOS)
|
||||
|
||||
OpenTTD does not require any of the libraries to be present, but without
|
||||
liblzma you cannot open most recent savegames and without zlib you cannot
|
||||
open most older savegames or use the content downloading system.
|
||||
Without libSDL/liballegro on non-Windows and non-macOS machines you have
|
||||
no graphical user interface; you would be building a dedicated server.
|
||||
|
||||
## Windows
## Windows:

You need Microsoft Visual Studio 2017 or more recent.
You need Microsoft Visual Studio 2015 Update 3 or newer.

You can download the free Visual Studio Community Edition from Microsoft at
https://visualstudio.microsoft.com/vs/community/.
@@ -51,108 +44,98 @@ by following the `Quick Start` instructions of their

After this, you can install the dependencies OpenTTD needs. We advise to use
the `static` versions, and OpenTTD currently needs the following dependencies:

- breakpad
- liblzma
- libpng
- lzo
- nlohmann-json
- zlib

To install both the x64 (64bit) and x86 (32bit) variants (though only one is necessary), you can use:

```ps
.\vcpkg install breakpad:x64-windows-static liblzma:x64-windows-static libpng:x64-windows-static lzo:x64-windows-static nlohmann-json:x64-windows-static zlib:x64-windows-static
.\vcpkg install breakpad:x86-windows-static liblzma:x86-windows-static libpng:x86-windows-static lzo:x86-windows-static nlohmann-json:x86-windows-static zlib:x86-windows-static
.\vcpkg install liblzma:x64-windows-static libpng:x64-windows-static lzo:x64-windows-static zlib:x64-windows-static
.\vcpkg install liblzma:x86-windows-static libpng:x86-windows-static lzo:x86-windows-static zlib:x86-windows-static
```

You can open the folder (as a CMake project). CMake will be detected, and you can compile from there.
If libraries are installed but not found, you need to set VCPKG_TARGET_TRIPLET in CMake parameters.
For Visual Studio 2017 you also need to set CMAKE_TOOLCHAIN_FILE.
(Typical values are shown in the MSVC project file command line example)
Open the relevant project file and it should build automatically.
- VS 2015: projects/openttd_vs140.sln
- VS 2017: projects/openttd_vs141.sln
- VS 2019: projects/openttd_vs142.sln

Alternatively, you can create a MSVC project file via CMake. For this
either download CMake from https://cmake.org/download/ or use the version
that comes with vcpkg. After that, you can run something similar to this:
Set the build mode to `Release` in
`Build > Configuration manager > Active solution configuration`.
You can now compile.

```powershell
mkdir build
cd build
cmake.exe .. -G"Visual Studio 16 2019" -DCMAKE_TOOLCHAIN_FILE="<location of vcpkg>\vcpkg\scripts\buildsystems\vcpkg.cmake" -DVCPKG_TARGET_TRIPLET="x64-windows-static"
```
If everything works well the binary should be in `objs\Win[32|64]\Release\openttd.exe`
and in `bin\openttd.exe`

Change `<location of vcpkg>` to where you have installed vcpkg. After this
in the build folder are MSVC project files. MSVC can rebuild the project
files himself via the `ZERO_CHECK` project.
The OpenTTD wiki may provide additional help with [compiling for Windows](https://wiki.openttd.org/Compiling_on_Windows_using_Microsoft_Visual_C%2B%2B_2015).
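For reference, once the build folder has been generated as shown above, the same project can also be configured and built entirely from the command line. This is a hedged sketch; `<location of vcpkg>` is the same placeholder used above and the triplet is only an example:

```bash
# Configure with the vcpkg toolchain file, then build the Release configuration.
cmake .. -DCMAKE_TOOLCHAIN_FILE="<location of vcpkg>/scripts/buildsystems/vcpkg.cmake" \
         -DVCPKG_TARGET_TRIPLET="x64-windows-static"
cmake --build . --config Release
```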
## All other platforms
Minimum required version of CMake is 3.9.
By default this produces a Debug build with assertations enabled.
This is a far slower build than release builds.
You can also build OpenTTD with MSYS2/MinGW-w64 or Cygwin/MinGW using the Makefile. The OpenTTD wiki may provide additional help with [MSYS2](https://wiki.openttd.org/Compiling_on_Windows_using_MSYS2)

```bash
mkdir build
cd build
cmake ..
make
```
## Linux, Unix, Solaris:

For more information on how to use CMake (including how to make Release builds),
we urge you to read [their excellent manual](https://cmake.org/cmake/help/latest/guide/user-interaction/index.html).
OpenTTD can be built with GNU '`make`'. On non-GNU systems it is called '`gmake`'.
However, for the first build one has to do a '`./configure`' first.

## CMake Options
The OpenTTD wiki may provide additional help with:

Via CMake, several options can be influenced to get different types of
builds.
- [compiling for Linux and *BSD](https://wiki.openttd.org/Compiling_on_%28GNU/%29Linux_and_*BSD)
- [compiling for Solaris](https://wiki.openttd.org/Compiling_on_Solaris)

- `-DCMAKE_BUILD_TYPE=RelWithDebInfo`: build a release build. This is
  significantly faster than a debug build, but has far less useful information
  in case of a crash.
- `-DOPTION_DEDICATED=ON`: build OpenTTD without a GUI. Useful if you are
  running a headless server, as it requires less libraries to operate.
- `-DOPTION_USE_ASSERTS=OFF`: disable asserts. Use with care, as assert
  statements capture early signs of trouble. Release builds have them
  disabled by default.
- `-DOPTION_USE_THREADS=OFF`: disable the use of threads. This will block
  the interface in many places, and in general gives a worse experience of
  the game. Use with care.
- `-DOPTION_TOOLS_ONLY=ON`: only build tools like `strgen`. Does not build
  the game itself. Useful for cross-compiling.
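To make the list above concrete, here is an illustrative configure line for a headless release build; the combination of flags is the editor's example, not a recommendation from the document:

```bash
mkdir build && cd build
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DOPTION_DEDICATED=ON  # release build without a GUI
make -j"$(nproc)"                                                 # nproc assumes GNU coreutils
```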

## macOS:

Use '`make`' or Xcode (which will then call make for you)
This will give you a binary for your CPU type (PPC/Intel)
However, for the first build one has to do a '`./configure`' first.
To make a universal binary type '`./configure --enable-universal`'
instead of '`./configure`'.

The OpenTTD wiki may provide additional help with [compiling for macOS](https://wiki.openttd.org/Compiling_on_Mac_OS_X).

## Haiku:

Use '`make`', but do a '`./configure`' before the first build.

The OpenTTD wiki may provide additional help with [compiling for Haiku](https://wiki.openttd.org/Compiling_on_Haiku).

## OS/2:

A comprehensive GNU build environment is required to build the OS/2 version.

The OpenTTD wiki may provide additional help with [compiling for OS/2](https://wiki.openttd.org/Compiling_on_OS/2).
## Supported compilers

Every compiler that is supported by CMake and supports C++17, should be
able to compile OpenTTD. As the exact list of compilers changes constantly,
we refer to the compiler manual to see if it supports C++17, and to CMake
to see if it supports your compiler.
The following compilers are tested with and known to compile OpenTTD:

- Microsoft Visual C++ (MSVC) 2015, 2017 and 2019.
- GNU Compiler Collection (GCC) 4.8 - 9.
- Clang/LLVM 3.9 - 8

The following compilers are known not to compile OpenTTD:

In general, this is because these old versions do not (fully) support modern
C++11 language features.

- Microsoft Visual C++ (MSVC) 2013 and earlier.
- GNU Compiler Collection (GCC) 4.7 and earlier.
- Clang/LLVM 3.8 and earlier.

If any of these, or any other, compilers can compile OpenTTD, let us know.
Pull requests to support more compilers are welcome.

## Compilation of base sets

To recompile the extra graphics needed to play with the original Transport
Tycoon Deluxe graphics you need GRFCodec (which includes NFORenum) as well.
GRFCodec can be found at
https://www.openttd.org/downloads/grfcodec-releases/latest.html.
GRFCodec can be found at https://www.openttd.org/download-grfcodec.
The compilation of these extra graphics does generally not happen, unless
you remove the graphics file using '`make maintainer-clean`'.

Having GRFCodec installed can cause regeneration of the `.grf` files, which
are written in the source directory. This can leave your repository in a
modified state, as different GRFCodec versions can cause binary differences
in the resulting `.grf` files. Also translations might have been added for
the base sets which are not yet included in the base set information files.
To avoid this behaviour, disable GRFCodec (and NFORenum) in CMake cache
(`GRFCODEC_EXECUTABLE` and `NFORENUM_EXECUTABLE`).

## Developers settings

You can control some flags directly via `CXXFLAGS` (any combination
of these flags will work fine too):

- `-DRANDOM_DEBUG`: this helps with debugging desyncs.
- `-fno-inline`: this avoids creating inline functions; this can make
  debugging a lot easier.
- `-O0`: this disables all optimizations; this can make debugging a
  lot easier.
- `-p`: this enables profiling.

Always use a clean buildfolder if you changing `CXXFLAGS`, as this
value is otherwise cached. Example use:

`CXXFLAGS="-fno-inline" cmake ..`
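Building on the example above, a debugging-oriented configuration might combine several of these flags. The exact combination is an illustrative sketch only:

```bash
# Fresh build folder so a previously cached CXXFLAGS value cannot leak in.
mkdir build-debug && cd build-debug
CXXFLAGS="-O0 -fno-inline -DRANDOM_DEBUG" cmake ..
make
```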
Re-compilation of the base sets, thus also use of '`--maintainer-clean`' can
leave the repository in a modified state as different grfcodec versions can
cause binary differences in the resulting grf. Also translations might have
been added for the base sets which are not yet included in the base set
information files. Use the configure option '`--without-grfcodec`' to avoid
modification of the base set files by the build process.

@@ -14,7 +14,7 @@ In return, they should reciprocate that respect in addressing your issue or asse

The [issue tracker](https://github.com/OpenTTD/OpenTTD/issues) is the preferred channel for [bug reports](#bug-reports), but please respect the following restrictions:

* Please **do not** use the issue tracker for help playing or using OpenTTD.
Please try [irc](https://wiki.openttd.org/en/Development/IRC%20channel), [Discord](https://discord.gg/openttd), or the [forums](https://www.tt-forums.net/)
Please try [irc](https://wiki.openttd.org/IRC_channel), or the [forums](https://www.tt-forums.net/)

* Please **do not** derail or troll issues. Keep the discussion on topic and respect the opinions of others.

@@ -23,9 +23,7 @@ Use [GitHub's "reactions" feature](https://github.com/blog/2119-add-reactions-to
We reserve the right to delete comments which violate this rule.

* Please **do not** open issues or pull requests regarding add-on content in NewGRF, GameScripts, AIs, etc.
These are created by third-parties. Please try [irc](https://wiki.openttd.org/en/Development/IRC%20channel), [Discord](https://discord.gg/openttd), or the [forums](https://www.tt-forums.net/) to discuss these.

* Please use [the web translator](https://translator.openttd.org/) to submit corrections and improvements to translations of the game.
These are created by third-parties. Please try [irc](https://wiki.openttd.org/IRC_channel) or the [forums](https://www.tt-forums.net/) to discuss these.

## Bug reports

@@ -35,16 +33,16 @@ Good bug reports are extremely helpful, so thanks!

Guidelines for bug reports:

0. Please don't report issues with games where you changed NewGRFs mid-game. (This can be verified with the `gamelog` console command in-game.)
0. Please don't report issues with games where you changed NewGRFs.

1. Please don't report issues with modified versions of OpenTTD (patchpacks, unofficial ports, and similar).
1. Please don't report issues with modified versions of OpenTTD (patchpacks and similar).

2. **Use the GitHub issue search** — check if the issue has already been
2. **Use the GitHub issue search** --- check if the issue has already been
reported.

3. **Check if the issue has been fixed** — try to reproduce it using the latest `nightly` build of OpenTTD, available from https://www.openttd.org
3. **Check if the issue has been fixed** --- try to reproduce it using the latest `nightly` build of OpenTTD, available from https://www.openttd.org

4. **Isolate the problem** — ideally create reproducible steps with an attached savegame and screenshots. Try to use few or no NewGRFs, AIs etc if possible.
4. **Isolate the problem** --- ideally create reproduceable steps with an attached savegame and screenshots. Try to use few or no NewGRFs, AIs etc if possible.

A good bug report shouldn't leave others needing to chase you up for more information.
Please try to be as detailed as possible in your report.
@@ -94,8 +92,8 @@ Although we really appreciate feedback and ideas, we will close feature requests

Many of those ideas etc do have a place on the [forums](https://www.tt-forums.net); and if enough people like it, someone will stand up and make it.

It's usually best to discuss on [Discord](https://discord.gg/openttd) before opening a feature request or working on a large feature in a fork.
Discussion can take time, but it can be productive and avoid disappointment. :)
It's usually best discuss in [irc](https://wiki.openttd.org/IRC_channel) before opening a feature request or working on a large feature in a fork.
Discussion in irc can take time, but it can be productive and avoid disappointment :)

## Pull requests

@@ -108,7 +106,7 @@ Pull requests should fit with the [goals of the project](./CONTRIBUTING.md#proje

Every pull request should have a clear scope, with no unrelated commits.

[Code style](./CODINGSTYLE.md) must be complied with for pull requests to be accepted; this also includes [commit message format](./CODINGSTYLE.md#commit-message).
[Code style](https://wiki.openttd.org/Coding_style) must be complied with for pull requests to be accepted; this also includes [commit message format](https://wiki.openttd.org/Coding_style#Commit_message).

Adhering to the following process is the best way to get your work included in the project:

@@ -136,8 +134,8 @@ contain your feature, change, or fix:
git checkout upstream/master -b <topic-branch-name>
```

4. Commit your changes in logical chunks. Please adhere to these [git commit message guidelines](./CODINGSTYLE.md#commit-message) or your code is unlikely to be merged into the main project.
Use Git's [interactive rebase](https://docs.github.com/en/get-started/using-git/about-git-rebase) feature to tidy up your commits before making them public.
4. Commit your changes in logical chunks. Please adhere to these [git commit message guidelines](https://wiki.openttd.org/Commit_style#Commit_message) or your code is unlikely to be merged into the main project.
Use Git's [interactive rebase](https://help.github.com/articles/interactive-rebase) feature to tidy up your commits before making them public.

5. Locally rebase the upstream development branch into your topic branch:
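The command for that rebase step is cut off by the diff context above. Purely as a generic illustration of such a step (not quoted from this document; the project's full CONTRIBUTING.md defines the canonical commands):

```bash
git fetch upstream
git rebase upstream/master
```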
@@ -172,14 +170,14 @@ The results of the CI tests will show on your pull request.
By clicking on Details you can further zoom in; in case of a failure it will show you why it failed.
In case of success it will report how awesome you were.

Tip: [commit message format](./CODINGSTYLE.md#commit-message) is a common reason for pull requests to fail validation.
Tip: [commit message format](https://wiki.openttd.org/Coding_style#Commit_message) is a common reason for pull requests to fail validation.

### Are there any development docs?

There is no single source for OpenTTD development docs. It's a complex project with a long history, and multiple APIs.

A good entry point is [Development](https://wiki.openttd.org/en/Development/) on the OpenTTD wiki; this provides links to wiki documentation and other sources.
A good entry point is [Development](https://wiki.openttd.org/Development) on the OpenTTD wiki; this provides links to wiki documentation and other sources.

The GitHub repo also includes some non-comprehensive documentation in [/docs](./docs).

@@ -212,8 +210,8 @@ When it comes to gameplay features there are at least these groups of interests:
- *Control freak:* micromanagement like conditional orders, refitting and loading etc.
- *Casual:* automatisation like cargodist, path based signalling etc.

To please everyone, the official branch tries to stay close to the original gameplay; after all, that is what brought everyone here.
The preferred method to alter and extend the gameplay is via add-ons like NewGRF and GameScripts.
To please everyone, the official branch tries to stay close to the original gameplay; after all, that is what everyone brought here.
The preferred method to alter and extent the gameplay is via add-ons like NewGRF and GameScripts.

For a long time, the official branch was also open to features which could be enabled/disabled, but the corner-cases that came with some configurations have rendered some parts of the code very complicated.
Today, new features have to work with all the already existing features, which is not only challenging in corner cases, but also requires spending considerable more work than just "making it work in the game mode that I play".

@@ -265,5 +263,5 @@ If you would not like to accept this risk, please do either commit anonymously o

### Attribution of this Contributing Guide

This contributing guide is adapted from [Bootstrap](https://github.com/twbs/bootstrap/blob/main/.github/CONTRIBUTING.md) under the [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) terms for Bootstrap documentation.
This contributing guide is adapted from [Bootstrap](https://github.com/twbs/bootstrap/blob/master/CONTRIBUTING.md) under the [Creative Commons Attribution 3.0 Unported License](https://github.com/twbs/bootstrap/blob/master/docs/LICENSE) terms for Bootstrap documentation.
The GDPR notice is adapted from [rsyslog](https://github.com/rsyslog/rsyslog/blob/master/CONTRIBUTING.md) under the [GNU General Public License](https://github.com/rsyslog/rsyslog/blob/master/COPYING).
@@ -1,12 +0,0 @@
# Make the current version available to CPack
set(CPACK_PACKAGE_VERSION "@REV_VERSION@")

# Name the output file with the correct version
string(REPLACE "#CPACK_PACKAGE_VERSION#" "@REV_VERSION@" CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_FILE_NAME}")

if (CPACK_BUNDLE_PLIST_SOURCE)
# Rewrite the Info.plist.in to contain the correct version
file(READ ${CPACK_BUNDLE_PLIST_SOURCE} INFO_PLIST_CONTENT)
string(REPLACE "#CPACK_PACKAGE_VERSION#" "@REV_VERSION@" INFO_PLIST_CONTENT "${INFO_PLIST_CONTENT}")
file(WRITE ${CPACK_BUNDLE_PLIST} "${INFO_PLIST_CONTENT}")
endif (CPACK_BUNDLE_PLIST_SOURCE)
29
CREDITS.md
29
CREDITS.md
@@ -1,44 +1,43 @@
|
||||
### The OpenTTD team (in alphabetical order):
|
||||
|
||||
- Grzegorz Duczyński (adf88) - General coding (since 1.7.2)
|
||||
- Albert Hofkamp (Alberth) - GUI expert (since 0.7)
|
||||
- Matthijs Kooijman (blathijs) - Pathfinder-guru, Debian port (since 0.3)
|
||||
- Ulf Hermann (fonsinchen) - Cargo Distribution (since 1.3)
|
||||
- Christoph Elsenhans (frosch) - General coding (since 0.6)
|
||||
- Loïc Guilloux (glx) - General / Windows Expert (since 0.4.5)
|
||||
- Loïc Guilloux (glx) - Windows Expert (since 0.4.5)
|
||||
- Charles Pigott (LordAro) - General / Correctness police (since 1.9)
|
||||
- Michael Lutz (michi_cc) - General / Path based signals (since 0.7)
|
||||
- Michael Lutz (michi_cc) - Path based signals (since 0.7)
|
||||
- Niels Martin Hansen (nielsm) - Music system, general coding (since 1.9)
|
||||
- Owen Rudge (orudge) - Forum host, OS/2 port (since 0.1)
|
||||
- Peter Nelson (peter1138) - Spiritual descendant from newGRF gods (since 0.4.5)
|
||||
- Remko Bijker (Rubidium) - Coder and way more (since 0.4.5)
|
||||
- Patric Stout (TrueBrain) - NoProgrammer (since 0.3), sys op
|
||||
- Tyler Trahan (2TallTyler) - General coding (since 13)
|
||||
- Ingo von Borstel (planetmaker) - General coding, Support (since 1.1)
|
||||
- Remko Bijker (Rubidium) - Lead coder and way more (since 0.4.5)
|
||||
- José Soler (Terkhen) - General coding (since 1.0)
|
||||
- Leif Linse (Zuu) - AI/Game Script (since 1.2)
|
||||
|
||||
### Inactive Developers:
|
||||
|
||||
- Grzegorz Duczyński (adf88) - General coding (1.7 - 1.8)
|
||||
- Albert Hofkamp (Alberth) - GUI expert (0.7 - 1.9)
|
||||
- Jean-François Claeys (Belugas) - GUI, newindustries and more (0.4.5 - 1.0)
|
||||
- Bjarni Corfitzen (Bjarni) - macOS port, coder and vehicles (0.3 - 0.7)
|
||||
- Victor Fischer (Celestar) - Programming everywhere you need him to (0.3 - 0.6)
|
||||
- Ulf Hermann (fonsinchen) - Cargo Distribution (1.3 - 1.6)
|
||||
- Jaroslav Mazanec (KUDr) - YAPG (Yet Another Pathfinder God) ;) (0.4.5 - 0.6)
|
||||
- Jonathan Coome (Maedhros) - High priest of the NewGRF Temple (0.5 - 0.6)
|
||||
- Attila Bán (MiHaMiX) - Developer WebTranslator 1 and 2 (0.3 - 0.5)
|
||||
- Ingo von Borstel (planetmaker) - General coding, Support (1.1 - 1.9)
|
||||
- Attila Bán (MiHaMiX) - WebTranslator 1 and 2 (0.3 - 0.5)
|
||||
- Zdeněk Sojka (SmatZ) - Bug finder and fixer (0.6 - 1.3)
|
||||
- José Soler (Terkhen) - General coding (1.0 - 1.4)
|
||||
- Christoph Mallon (Tron) - Programmer, code correctness police (0.3 - 0.5)
|
||||
- Patric Stout (TrueBrain) - NoProgrammer (0.3 - 1.2), sys op (active)
|
||||
- Thijs Marinussen (Yexo) - AI Framework, General (0.6 - 1.3)
|
||||
- Leif Linse (Zuu) - AI/Game Script (1.2 - 1.6)
|
||||
|
||||
### Retired Developers:
|
||||
|
||||
- Tamás Faragó (Darkvater) - Ex-Lead coder (0.3 - 0.5)
|
||||
- Dominik Scherer (dominik81) - Lead programmer, GUI expert (0.3 - 0.3)
|
||||
- Emil Djupfeld (egladil) - macOS port (0.4.5 - 0.6)
|
||||
- Emil Djupfeld (egladil) - macOS port (0.4 - 0.6)
|
||||
- Simon Sasburg (HackyKid) - Bug fixer (0.4 - 0.4.5)
|
||||
- Ludvig Strigeus (ludde) - Original author of OpenTTD, main coder (0.1 - 0.3)
|
||||
- Cian Duffy (MYOB) - BeOS port / manual writing (0.1 - 0.3)
|
||||
- Petr Baudiš (pasky) - Many patches, newgrf support (0.3 - 0.3)
|
||||
- Petr Baudiš (pasky) - Many patches, newgrf support, etc. (0.3 - 0.3)
|
||||
- Benedikt Brüggemeier (skidd13) - Bug fixer and code reworker (0.6 - 0.7)
|
||||
- Serge Paquet (vurlix) - 2nd contributor after ludde (0.1 - 0.3)
|
||||
|
||||
@@ -53,7 +52,7 @@
|
||||
- Alberto Demichelis - Squirrel scripting language
|
||||
- L. Peter Deutsch - MD5 implementation
|
||||
- Michael Blunck - For revolutionizing TTD with awesome graphics
|
||||
- George - Canal/Lock graphics
|
||||
- George - Canal graphics
|
||||
- Andrew Parkhouse (andythenorth) - River graphics
|
||||
- David Dallaston (Pikka) - Tram tracks
|
||||
- All Translators - For their support to make OpenTTD a truly international game
|
||||
|
@@ -8,10 +8,10 @@
|
||||
#---------------------------------------------------------------------------
|
||||
DOXYFILE_ENCODING = UTF-8
|
||||
PROJECT_NAME = "OpenTTD Source"
|
||||
PROJECT_NUMBER = ${REV_VERSION}
|
||||
PROJECT_NUMBER = $(VERSION)
|
||||
PROJECT_BRIEF =
|
||||
PROJECT_LOGO =
|
||||
OUTPUT_DIRECTORY = ${CPACK_BINARY_DIR}/docs/source/
|
||||
OUTPUT_DIRECTORY = docs/source/
|
||||
CREATE_SUBDIRS = YES
|
||||
ALLOW_UNICODE_NAMES = NO
|
||||
OUTPUT_LANGUAGE = English
|
||||
@@ -41,6 +41,7 @@ INHERIT_DOCS = YES
|
||||
SEPARATE_MEMBER_PAGES = NO
|
||||
TAB_SIZE = 2
|
||||
ALIASES =
|
||||
TCL_SUBST =
|
||||
OPTIMIZE_OUTPUT_FOR_C = YES
|
||||
OPTIMIZE_OUTPUT_JAVA = NO
|
||||
OPTIMIZE_FOR_FORTRAN = NO
|
||||
@@ -154,6 +155,7 @@ VERBATIM_HEADERS = YES
|
||||
# Configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
ALPHABETICAL_INDEX = NO
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
IGNORE_PREFIX =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the HTML output
|
||||
@@ -222,7 +224,7 @@ LATEX_OUTPUT = latex
|
||||
LATEX_CMD_NAME = latex
|
||||
MAKEINDEX_CMD_NAME = makeindex
|
||||
COMPACT_LATEX = NO
|
||||
PAPER_TYPE = a4
|
||||
PAPER_TYPE = a4wide
|
||||
EXTRA_PACKAGES =
|
||||
LATEX_HEADER =
|
||||
LATEX_FOOTER =
|
||||
@@ -292,8 +294,8 @@ PREDEFINED = WITH_ZLIB \
|
||||
WITH_PNG \
|
||||
WITH_FONTCONFIG \
|
||||
WITH_FREETYPE \
|
||||
WITH_HARFBUZZ \
|
||||
WITH_ICU_I18N \
|
||||
WITH_ICU_LX \
|
||||
UNICODE \
|
||||
_UNICODE \
|
||||
_GNU_SOURCE \
|
||||
@@ -304,14 +306,16 @@ SKIP_FUNCTION_MACROS = YES
|
||||
# Configuration options related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
TAGFILES =
|
||||
GENERATE_TAGFILE = ${CPACK_BINARY_DIR}/docs/openttd.tag
|
||||
GENERATE_TAGFILE = objs/openttd.tag
|
||||
ALLEXTERNALS = NO
|
||||
EXTERNAL_GROUPS = YES
|
||||
EXTERNAL_PAGES = YES
|
||||
PERL_PATH = /usr/bin/perl
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
CLASS_DIAGRAMS = YES
|
||||
MSCGEN_PATH =
|
||||
DIA_PATH =
|
||||
HIDE_UNDOC_RELATIONS = YES
|
||||
HAVE_DOT = NO
|
221
Makefile.bundle.in
Normal file
221
Makefile.bundle.in
Normal file
@@ -0,0 +1,221 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#
|
||||
# Creation of bundles
|
||||
#
|
||||
|
||||
# The revision is needed for the bundle name and creating an OSX application bundle.
|
||||
# Detect the revision
|
||||
VERSIONS := $(shell AWK="$(AWK)" "$(ROOT_DIR)/findversion.sh")
|
||||
VERSION := $(shell echo "$(VERSIONS)" | cut -f 1 -d' ')
|
||||
|
||||
# Make sure we have something in VERSION
|
||||
ifeq ($(VERSION),)
|
||||
VERSION := norev000
|
||||
endif
|
||||
|
||||
ifndef BUNDLE_NAME
|
||||
BUNDLE_NAME = openttd-custom-$(VERSION)-$(OS)
|
||||
endif
|
||||
|
||||
# An OSX application bundle needs the data files, lang files and openttd executable in a different location.
|
||||
ifdef OSXAPP
|
||||
AI_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/ai
|
||||
GAME_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/game
|
||||
BASESET_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/baseset
|
||||
LANG_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/lang
|
||||
TTD_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/MacOS
|
||||
else
|
||||
AI_DIR = $(BUNDLE_DIR)/ai
|
||||
GAME_DIR = $(BUNDLE_DIR)/game
|
||||
BASESET_DIR = $(BUNDLE_DIR)/baseset
|
||||
LANG_DIR = $(BUNDLE_DIR)/lang
|
||||
TTD_DIR = $(BUNDLE_DIR)
|
||||
endif
|
||||
|
||||
bundle: all
|
||||
@echo '[BUNDLE] Constructing bundle'
|
||||
$(Q)rm -rf "$(BUNDLE_DIR)"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/docs"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/media"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/scripts"
|
||||
$(Q)mkdir -p "$(TTD_DIR)"
|
||||
$(Q)mkdir -p "$(AI_DIR)"
|
||||
$(Q)mkdir -p "$(GAME_DIR)"
|
||||
$(Q)mkdir -p "$(BASESET_DIR)"
|
||||
$(Q)mkdir -p "$(LANG_DIR)"
|
||||
ifdef OSXAPP
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources"
|
||||
$(Q)echo "APPL????" > "$(BUNDLE_DIR)/$(OSXAPP)/Contents/PkgInfo"
|
||||
$(Q)cp "$(ROOT_DIR)/os/macosx/openttd.icns" "$(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/openttd.icns"
|
||||
$(Q)$(ROOT_DIR)/os/macosx/plistgen.sh "$(BUNDLE_DIR)/$(OSXAPP)" "$(VERSION)"
|
||||
$(Q)cp "$(ROOT_DIR)/os/macosx/splash.png" "$(BASESET_DIR)"
|
||||
endif
|
||||
ifeq ($(OS),UNIX)
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd.32.bmp" "$(BASESET_DIR)/"
|
||||
endif
|
||||
$(Q)cp "$(BIN_DIR)/$(TTD)" "$(TTD_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/ai/"compat_*.nut "$(AI_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/game/"compat_*.nut "$(GAME_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.grf "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.obg "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.obs "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/opntitle.dat" "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.obm "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/lang/"*.lng "$(LANG_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/README.md" "$(BUNDLE_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/COPYING.md" "$(BUNDLE_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/known-bugs.txt" "$(BUNDLE_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/docs/multiplayer.md" "$(BUNDLE_DIR)/docs/"
|
||||
$(Q)cp "$(ROOT_DIR)/changelog.txt" "$(BUNDLE_DIR)/"
|
||||
ifdef MAN_DIR
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/man/"
|
||||
$(Q)cp "$(ROOT_DIR)/docs/openttd.6" "$(BUNDLE_DIR)/man/"
|
||||
$(Q)gzip -9 "$(BUNDLE_DIR)/man/openttd.6"
|
||||
endif
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd.32.xpm" "$(BUNDLE_DIR)/media/"
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd."*.png "$(BUNDLE_DIR)/media/"
|
||||
$(Q)cp "$(BIN_DIR)/scripts/"* "$(BUNDLE_DIR)/scripts/"
|
||||
ifdef MENU_DIR
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd.desktop" "$(BUNDLE_DIR)/media/"
|
||||
$(Q)$(AWK) -f "$(ROOT_DIR)/media/openttd.desktop.translation.awk" "$(SRC_DIR)/lang/"*.txt | LC_ALL=C $(SORT) | $(AWK) -f "$(ROOT_DIR)/media/openttd.desktop.filter.awk" >> "$(BUNDLE_DIR)/media/openttd.desktop"
|
||||
$(Q)sed s/=openttd/=$(BINARY_NAME)/g "$(BUNDLE_DIR)/media/openttd.desktop" > "$(ROOT_DIR)/media/openttd.desktop.install"
|
||||
endif
|
||||
ifeq ($(TTD), openttd.exe)
|
||||
$(Q)unix2dos "$(BUNDLE_DIR)/docs/"* "$(BUNDLE_DIR)/README.md" "$(BUNDLE_DIR)/COPYING.md" "$(BUNDLE_DIR)/changelog.txt" "$(BUNDLE_DIR)/known-bugs.txt"
|
||||
endif
|
||||
|
||||
### Packing the current bundle into several compressed file formats ###
|
||||
#
|
||||
# Zips & dmgs do not contain a root folder, i.e. they have files in the root of the zip/dmg.
|
||||
# gzip, bzip2 and lha archives have a root folder, with the same name as the bundle.
|
||||
#
|
||||
# One can supply a custom name by adding BUNDLE_NAME:=<name> to the make command.
|
||||
#
|
||||
bundle_zip: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).zip'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)cd "$(BUNDLE_DIR)" && zip -r $(shell if test -z "$(VERBOSE)"; then echo '-q'; fi) "$(BUNDLES_DIR)/$(BUNDLE_NAME).zip" .
|
||||
|
||||
bundle_7z: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).7z'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)cd "$(BUNDLE_DIR)" && 7z a "$(BUNDLES_DIR)/$(BUNDLE_NAME).7z" .
|
||||
|
||||
bundle_gzip: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.gz'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.gzip/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.gzip/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.gzip" && tar -zc$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.gz" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.gzip"
|
||||
|
||||
bundle_bzip2: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.bz2'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.bzip2/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.bzip2/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.bzip2" && tar -jc$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.bz2" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.bzip2"
|
||||
|
||||
bundle_lzma: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.lzma'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.lzma/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.lzma/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.lzma" && tar --lzma -c$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.lzma" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.lzma"
|
||||
|
||||
bundle_xz: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.xz'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.xz/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.xz/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.xz" && tar --xz -c$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.xz" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.xz"
|
||||
|
||||
bundle_lha: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).lha'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.lha/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.lha/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.lha" && lha ao6 "$(BUNDLES_DIR)/$(BUNDLE_NAME).lha" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.lha"
|
||||
|
||||
bundle_dmg: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).dmg'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/OpenTTD $(VERSION)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/" "$(BUNDLES_DIR)/OpenTTD $(VERSION)"
|
||||
$(Q)hdiutil create -ov -format UDZO -srcfolder "$(BUNDLES_DIR)/OpenTTD $(VERSION)" "$(BUNDLES_DIR)/$(BUNDLE_NAME).dmg"
|
||||
$(Q)rm -fr "$(BUNDLES_DIR)/OpenTTD $(VERSION)"
|
||||
|
||||
bundle_exe: all
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).exe'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)unix2dos "$(ROOT_DIR)/docs/"* "$(ROOT_DIR)/README.md" "$(ROOT_DIR)/COPYING.md" "$(ROOT_DIR)/changelog.txt" "$(ROOT_DIR)/known-bugs.txt"
|
||||
$(Q)cd $(ROOT_DIR)/os/windows/installer && makensis.exe //DVERSION_INCLUDE=version_$(PLATFORM).txt install.nsi
|
||||
$(Q)mv $(ROOT_DIR)/os/windows/installer/*$(PLATFORM).exe "$(BUNDLES_DIR)/$(BUNDLE_NAME).exe"
|
||||
|
||||
ifdef OSXAPP
|
||||
install:
|
||||
@echo '[INSTALL] Cannot install the OSX Application Bundle'
|
||||
else
|
||||
install: bundle
|
||||
@echo '[INSTALL] Installing OpenTTD'
|
||||
$(Q)install -d "$(INSTALL_BINARY_DIR)"
|
||||
$(Q)install -d "$(INSTALL_ICON_DIR)"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/ai"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/game"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/baseset"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/lang"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/scripts"
|
||||
ifeq ($(TTD), openttd.exe)
|
||||
$(Q)install -m 755 "$(BUNDLE_DIR)/$(TTD)" "$(INSTALL_BINARY_DIR)/${BINARY_NAME}.exe"
|
||||
else
|
||||
$(Q)install -m 755 "$(BUNDLE_DIR)/$(TTD)" "$(INSTALL_BINARY_DIR)/${BINARY_NAME}"
|
||||
endif
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/lang/"* "$(INSTALL_DATA_DIR)/lang"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/ai/"* "$(INSTALL_DATA_DIR)/ai"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/game/"* "$(INSTALL_DATA_DIR)/game"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/baseset/"* "$(INSTALL_DATA_DIR)/baseset"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/scripts/"* "$(INSTALL_DATA_DIR)/scripts"
|
||||
ifndef DO_NOT_INSTALL_DOCS
|
||||
$(Q)install -d "$(INSTALL_DOC_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/docs/"* "$(BUNDLE_DIR)/README.md" "$(BUNDLE_DIR)/known-bugs.txt" "$(INSTALL_DOC_DIR)"
|
||||
endif
|
||||
ifndef DO_NOT_INSTALL_CHANGELOG
|
||||
$(Q)install -d "$(INSTALL_DOC_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/changelog.txt" "$(INSTALL_DOC_DIR)"
|
||||
endif
|
||||
ifndef DO_NOT_INSTALL_LICENSE
|
||||
$(Q)install -d "$(INSTALL_DOC_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/COPYING.md" "$(INSTALL_DOC_DIR)"
|
||||
endif
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.32.xpm" "$(INSTALL_ICON_DIR)/${BINARY_NAME}.32.xpm"
|
||||
ifdef ICON_THEME_DIR
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/16x16/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.16.png" "$(INSTALL_ICON_THEME_DIR)/16x16/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/32x32/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.32.png" "$(INSTALL_ICON_THEME_DIR)/32x32/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/48x48/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.48.png" "$(INSTALL_ICON_THEME_DIR)/48x48/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/64x64/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.64.png" "$(INSTALL_ICON_THEME_DIR)/64x64/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/128x128/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.128.png" "$(INSTALL_ICON_THEME_DIR)/128x128/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/256x256/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.256.png" "$(INSTALL_ICON_THEME_DIR)/256x256/apps/${BINARY_NAME}.png"
|
||||
else
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/"*.png "$(INSTALL_ICON_DIR)"
|
||||
endif
|
||||
ifdef MAN_DIR
|
||||
ifndef DO_NOT_INSTALL_MAN
|
||||
$(Q)install -d "$(INSTALL_MAN_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/man/openttd.6.gz" "$(INSTALL_MAN_DIR)/${BINARY_NAME}.6.gz"
|
||||
endif
|
||||
endif
|
||||
ifdef MENU_DIR
|
||||
$(Q)install -d "$(INSTALL_MENU_DIR)"
|
||||
$(Q)install -m 644 "$(ROOT_DIR)/media/openttd.desktop.install" "$(INSTALL_MENU_DIR)/${BINARY_NAME}.desktop"
|
||||
endif
|
||||
endif # OSXAPP
|
116
Makefile.grf.in
Normal file
116
Makefile.grf.in
Normal file
@@ -0,0 +1,116 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# Building requires GRFCodec.
|
||||
#
|
||||
# Recent versions (including sources) can be found at:
|
||||
# http://www.openttd.org/download-grfcodec
|
||||
#
|
||||
# The mercurial repository can be found at:
|
||||
# http://hg.openttdcoop.org/grfcodec
|
||||
#
|
||||
|
||||
|
||||
ROOT_DIR = !!ROOT_DIR!!
|
||||
GRF_DIR = $(ROOT_DIR)/media/extra_grf
|
||||
BASESET_DIR = $(ROOT_DIR)/media/baseset
|
||||
LANG_DIR = $(ROOT_DIR)/src/lang
|
||||
BIN_DIR = !!BIN_DIR!!/baseset
|
||||
OBJS_DIR = !!GRF_OBJS_DIR!!
|
||||
OS = !!OS!!
|
||||
STAGE = !!STAGE!!
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
GRFCODEC := !!GRFCODEC!!
|
||||
NFORENUM := !!NFORENUM!!
|
||||
CC_BUILD := !!CC_BUILD!!
|
||||
MD5SUM := $(shell [ "$(OS)" = "OSX" ] && echo "md5 -r" || echo "md5sum")
|
||||
|
||||
# Some "should not be changed" settings.
|
||||
NFO_FILES := $(GRF_DIR)/*.nfo $(GRF_DIR)/rivers/*.nfo
|
||||
PNG_FILES := $(GRF_DIR)/*.png $(GRF_DIR)/rivers/*.png
|
||||
|
||||
# List of target files.
|
||||
OBT_FILES := $(BIN_DIR)/orig_dos.obg
|
||||
OBT_FILES += $(BIN_DIR)/orig_dos_de.obg
|
||||
OBT_FILES += $(BIN_DIR)/orig_win.obg
|
||||
OBT_FILES += $(BIN_DIR)/orig_dos.obs
|
||||
OBT_FILES += $(BIN_DIR)/orig_win.obs
|
||||
OBT_FILES += $(BIN_DIR)/no_sound.obs
|
||||
OBT_FILES += $(BIN_DIR)/orig_dos.obm
|
||||
OBT_FILES += $(BIN_DIR)/orig_win.obm
|
||||
OBT_FILES += $(BIN_DIR)/no_music.obm
|
||||
OBT_FILES += $(BIN_DIR)/orig_tto.obm
|
||||
|
||||
# Build the GRF.
|
||||
all: $(OBT_FILES)
|
||||
ifdef GRFCODEC
|
||||
all: $(BIN_DIR)/openttd.grf $(BIN_DIR)/orig_extra.grf
|
||||
endif
|
||||
|
||||
$(OBJS_DIR)/langfiles.tmp: $(LANG_DIR)/*.txt
|
||||
$(E) '$(STAGE) Collecting baseset translations'
|
||||
$(Q) cat $^ > $@
|
||||
|
||||
$(BIN_DIR)/%.obg: $(BASESET_DIR)/%.obg $(BIN_DIR)/orig_extra.grf $(OBJS_DIR)/langfiles.tmp $(BASESET_DIR)/translations.awk
|
||||
$(E) '$(STAGE) Updating $(notdir $@)'
|
||||
$(Q) sed 's/^ORIG_EXTRA.GRF = *[0-9a-f]*$$/ORIG_EXTRA.GRF = '`$(MD5SUM) $(BIN_DIR)/orig_extra.grf | sed 's@ .*@@'`'/' $< > $@.tmp
|
||||
$(Q) awk -v langfiles='$(OBJS_DIR)/langfiles.tmp' -f $(BASESET_DIR)/translations.awk $@.tmp >$@
|
||||
$(Q) rm $@.tmp
|
||||
|
||||
$(BIN_DIR)/%.obs: $(BASESET_DIR)/%.obs $(OBJS_DIR)/langfiles.tmp $(BASESET_DIR)/translations.awk
|
||||
$(E) '$(STAGE) Updating $(notdir $@)'
|
||||
$(Q) awk -v langfiles='$(OBJS_DIR)/langfiles.tmp' -f $(BASESET_DIR)/translations.awk $< >$@
|
||||
|
||||
$(BIN_DIR)/%.obm: $(BASESET_DIR)/%.obm $(OBJS_DIR)/langfiles.tmp $(BASESET_DIR)/translations.awk
|
||||
$(E) '$(STAGE) Updating $(notdir $@)'
|
||||
$(Q) awk -v langfiles='$(OBJS_DIR)/langfiles.tmp' -f $(BASESET_DIR)/translations.awk $< >$@
|
||||
|
||||
# Guard against trying to run GRFCODEC/NFORENUM without either being set.
|
||||
ifdef GRFCODEC
|
||||
ifdef NFORENUM
|
||||
|
||||
# Compile extra grf
|
||||
$(BIN_DIR)/openttd.grf: $(PNG_FILES) $(NFO_FILES) $(GRF_DIR)/assemble_nfo.awk
|
||||
$(E) '$(STAGE) Assembling openttd.nfo'
|
||||
$(Q)-mkdir -p $(OBJS_DIR)/sprites
|
||||
$(Q)-cp $(PNG_FILES) $(OBJS_DIR)/sprites 2> /dev/null
|
||||
$(Q) awk -f $(GRF_DIR)/assemble_nfo.awk $(GRF_DIR)/openttd.nfo > $(OBJS_DIR)/sprites/openttd.nfo
|
||||
$(Q) $(NFORENUM) -s $(OBJS_DIR)/sprites/openttd.nfo
|
||||
$(E) '$(STAGE) Compiling openttd.grf'
|
||||
$(Q) $(GRFCODEC) -n -s -e -p1 $(OBJS_DIR)/openttd.grf
|
||||
$(Q)cp $(OBJS_DIR)/openttd.grf $(BIN_DIR)/openttd.grf
|
||||
|
||||
# The copy operation of PNG_FILES is duplicated from the target 'openttd.grf', thus those targets may not run in parallel.
|
||||
$(BIN_DIR)/orig_extra.grf: $(PNG_FILES) $(NFO_FILES) $(GRF_DIR)/assemble_nfo.awk | $(BIN_DIR)/openttd.grf
|
||||
$(E) '$(STAGE) Assembling orig_extra.nfo'
|
||||
$(Q)-mkdir -p $(OBJS_DIR)/sprites
|
||||
$(Q)-cp $(PNG_FILES) $(OBJS_DIR)/sprites 2> /dev/null
|
||||
$(Q) awk -f $(GRF_DIR)/assemble_nfo.awk $(GRF_DIR)/orig_extra.nfo > $(OBJS_DIR)/sprites/orig_extra.nfo
|
||||
$(Q) $(NFORENUM) -s $(OBJS_DIR)/sprites/orig_extra.nfo
|
||||
$(E) '$(STAGE) Compiling orig_extra.grf'
|
||||
$(Q) $(GRFCODEC) -n -s -e -p1 $(OBJS_DIR)/orig_extra.grf
|
||||
$(Q)cp $(OBJS_DIR)/orig_extra.grf $(BIN_DIR)/orig_extra.grf
|
||||
|
||||
endif
|
||||
endif
|
||||
|
||||
# Clean up temporary files.
|
||||
clean:
|
||||
$(Q)rm -f *.bak *.grf $(OBT_FILES)
|
||||
|
||||
# Clean up temporary files
|
||||
mrproper: clean
|
||||
$(Q)rm -fr sprites
|
||||
|
||||
.PHONY: all mrproper depend clean
|
185
Makefile.in
Normal file
185
Makefile.in
Normal file
@@ -0,0 +1,185 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
else
|
||||
Q = @
|
||||
endif
|
||||
|
||||
include Makefile.am
|
||||
|
||||
CONFIG_CACHE_PWD = !!CONFIG_CACHE_PWD!!
|
||||
CONFIG_CACHE_SOURCE_LIST = !!CONFIG_CACHE_SOURCE_LIST!!
|
||||
BIN_DIR = !!BIN_DIR!!
|
||||
ICON_THEME_DIR = !!ICON_THEME_DIR!!
|
||||
MAN_DIR = !!MAN_DIR!!
|
||||
MENU_DIR = !!MENU_DIR!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
ROOT_DIR = !!ROOT_DIR!!
|
||||
BUNDLE_DIR = "$(ROOT_DIR)/bundle"
|
||||
BUNDLES_DIR = "$(ROOT_DIR)/bundles"
|
||||
INSTALL_DIR = !!INSTALL_DIR!!
|
||||
INSTALL_BINARY_DIR = "$(INSTALL_DIR)/"!!BINARY_DIR!!
|
||||
INSTALL_MAN_DIR = "$(INSTALL_DIR)/$(MAN_DIR)"
|
||||
INSTALL_MENU_DIR = "$(INSTALL_DIR)/$(MENU_DIR)"
|
||||
INSTALL_ICON_DIR = "$(INSTALL_DIR)/"!!ICON_DIR!!
|
||||
INSTALL_ICON_THEME_DIR = "$(INSTALL_DIR)/$(ICON_THEME_DIR)"
|
||||
INSTALL_DATA_DIR = "$(INSTALL_DIR)/"!!DATA_DIR!!
|
||||
INSTALL_DOC_DIR = "$(INSTALL_DIR)/"!!DOC_DIR!!
|
||||
SOURCE_LIST = !!SOURCE_LIST!!
|
||||
CONFIGURE_FILES = !!CONFIGURE_FILES!!
|
||||
BINARY_NAME = !!BINARY_NAME!!
|
||||
STRIP = !!STRIP!!
|
||||
TTD = !!TTD!!
|
||||
TTDS = $(SRC_DIRS:%=%/$(TTD))
|
||||
OS = !!OS!!
|
||||
OSXAPP = !!OSXAPP!!
|
||||
LIPO = !!LIPO!!
|
||||
AWK = !!AWK!!
|
||||
SORT = !!SORT!!
|
||||
DISTCC = !!DISTCC!!
|
||||
|
||||
RES := $(shell if [ ! -f $(CONFIG_CACHE_PWD) ] || [ "`pwd`" != "`cat $(CONFIG_CACHE_PWD)`" ]; then echo "`pwd`" > $(CONFIG_CACHE_PWD); fi )
|
||||
RES := $(shell if [ ! -f $(CONFIG_CACHE_SOURCE_LIST) ] || [ -n "`cmp $(CONFIG_CACHE_SOURCE_LIST) $(SOURCE_LIST) 2>/dev/null`" ]; then cp $(SOURCE_LIST) $(CONFIG_CACHE_SOURCE_LIST); fi )
|
||||
|
||||
all: config.pwd config.cache
|
||||
ifdef DISTCC
|
||||
@if [ -z "`echo '$(MFLAGS)' | grep '\-j'`" ]; then echo; echo "WARNING: you enabled distcc support, but you don't seem to be using the -jN parameter"; echo; fi
|
||||
endif
|
||||
@for dir in $(DIRS); do \
|
||||
$(MAKE) -C $$dir all || exit 1; \
|
||||
done
|
||||
ifdef LIPO
|
||||
# Lipo is an OSX thing. If it is defined, it means we are building for universal,
|
||||
# and so we have have to combine the binaries into one big binary
|
||||
|
||||
# Remove the last binary made by the last compiled target
|
||||
$(Q)rm -f $(BIN_DIR)/$(TTD)
|
||||
# Make all the binaries into one
|
||||
$(Q)$(LIPO) -create -output $(BIN_DIR)/$(TTD) $(TTDS)
|
||||
endif
|
||||
|
||||
help:
|
||||
@echo "Available make commands:"
|
||||
@echo ""
|
||||
@echo "Compilation:"
|
||||
@echo " all compile the executable and the lang files"
|
||||
@echo " lang compile the lang files only"
|
||||
@echo "Clean up:"
|
||||
@echo " clean remove the files generated during compilation"
|
||||
@echo " mrproper remove the files generated during configuration and compilation"
|
||||
@echo "Run after compilation:"
|
||||
@echo " run execute openttd after the compilation"
|
||||
@echo " run-gdb execute openttd in debug mode after the compilation"
|
||||
@echo " run-prof execute openttd in profiling mode after the compilation"
|
||||
@echo "Installation:"
|
||||
@echo " install install the compiled files and the data-files after the compilation"
|
||||
@echo " bundle create the base for an installation bundle"
|
||||
@echo " bundle_zip create the zip installation bundle"
|
||||
@echo " bundle_gzip create the gzip installation bundle"
|
||||
@echo " bundle_bzip2 create the bzip2 installation bundle"
|
||||
@echo " bundle_lha create the lha installation bundle"
|
||||
@echo " bundle_dmg create the dmg installation bundle"
|
||||
|
||||
config.pwd: $(CONFIG_CACHE_PWD)
|
||||
$(MAKE) reconfigure
|
||||
|
||||
config.cache: $(CONFIG_CACHE_SOURCE_LIST) $(CONFIGURE_FILES)
|
||||
$(MAKE) reconfigure
|
||||
|
||||
reconfigure:
|
||||
ifeq ($(shell if test -f config.cache; then echo 1; fi), 1)
|
||||
@echo "----------------"
|
||||
@echo "The system detected that source.list or any configure file is altered."
|
||||
@echo " Going to reconfigure with last known settings..."
|
||||
@echo "----------------"
|
||||
# Make sure we don't lock config.cache
|
||||
@$(shell cat config.cache | sed 's@\\ @\\\\ @g') || exit 1
|
||||
@echo "----------------"
|
||||
@echo "Reconfig done. Please re-execute make."
|
||||
@echo "----------------"
|
||||
else
|
||||
@echo "----------------"
|
||||
@echo "Have not found a configuration, please run configure first."
|
||||
@echo "----------------"
|
||||
@exit 1
|
||||
endif
|
||||
|
||||
clean:
|
||||
@for dir in $(DIRS); do \
|
||||
$(MAKE) -C $$dir clean; \
|
||||
done
|
||||
$(Q)rm -rf $(BUNDLE_TARGET)
|
||||
|
||||
lang:
|
||||
@for dir in $(LANG_DIRS); do \
|
||||
$(MAKE) -C $$dir all; \
|
||||
done
|
||||
|
||||
mrproper:
|
||||
@for dir in $(DIRS); do \
|
||||
$(MAKE) -C $$dir mrproper; \
|
||||
done
|
||||
# Don't be tempted to merge these two for loops. Doing that breaks make
|
||||
# --dry-run, since make has this "feature" that it always runs commands
|
||||
# containing $(MAKE), even when --dry-run is passed. The objective is of
|
||||
# course to also get a dry-run of submakes, but make is not smart enough
|
||||
# to see that a for loop runs both a submake and an actual command.
|
||||
@for dir in $(DIRS); do \
|
||||
rm -f $$dir/Makefile; \
|
||||
done
|
||||
$(Q)rm -rf objs
|
||||
$(Q)rm -f Makefile Makefile.am Makefile.bundle
|
||||
$(Q)rm -f media/openttd.desktop media/openttd.desktop.install
|
||||
$(Q)rm -f $(CONFIG_CACHE_SOURCE_LIST) config.cache config.pwd config.log $(CONFIG_CACHE_PWD)
|
||||
# directories for bundle generation
|
||||
$(Q)rm -rf $(BUNDLE_DIR)
|
||||
$(Q)rm -rf $(BUNDLES_DIR)
|
||||
# output of profiling
|
||||
$(Q)rm -f $(BIN_DIR)/gmon.out
|
||||
# output of generating 'API' documentation
|
||||
$(Q)rm -rf $(ROOT_DIR)/docs/source
|
||||
$(Q)rm -rf $(ROOT_DIR)/docs/aidocs
|
||||
$(Q)rm -rf $(ROOT_DIR)/docs/gamedocs
|
||||
# directories created by OpenTTD on regression testing
|
||||
$(Q)rm -rf $(BIN_DIR)/ai/regression/content_download $(BIN_DIR)/ai/regression/save $(BIN_DIR)/ai/regression/scenario
|
||||
distclean: mrproper
|
||||
|
||||
maintainer-clean: distclean
|
||||
$(Q)rm -f $(BIN_DIR)/baseset/openttd.grf $(BIN_DIR)/baseset/orig_extra.grf $(BIN_DIR)/baseset/*.obg $(BIN_DIR)/baseset/*.obs $(BIN_DIR)/baseset/*.obm
|
||||
|
||||
depend:
|
||||
@for dir in $(SRC_DIRS); do \
|
||||
$(MAKE) -C $$dir depend; \
|
||||
done
|
||||
|
||||
run: all
|
||||
$(Q)cd !!BIN_DIR!! && ./!!TTD!! $(OPENTTD_ARGS)
|
||||
|
||||
run-gdb: all
|
||||
$(Q)cd !!BIN_DIR!! && gdb --ex run --args ./!!TTD!! $(OPENTTD_ARGS)
|
||||
|
||||
run-prof: all
|
||||
$(Q)cd !!BIN_DIR!! && ./!!TTD!! $(OPENTTD_ARGS) && gprof !!TTD!! | less
|
||||
|
||||
regression: all
|
||||
$(Q)cd !!BIN_DIR!! && sh ai/regression/run.sh
|
||||
test: regression
|
||||
|
||||
%.o:
|
||||
@for dir in $(SRC_DIRS); do \
|
||||
$(MAKE) -C $$dir $(@:src/%=%); \
|
||||
done
|
||||
|
||||
%.lng:
|
||||
@for dir in $(LANG_DIRS); do \
|
||||
$(MAKE) -C $$dir $@; \
|
||||
done
|
||||
|
||||
.PHONY: test distclean mrproper clean
|
||||
|
||||
include Makefile.bundle
|
87
Makefile.lang.in
Normal file
87
Makefile.lang.in
Normal file
@@ -0,0 +1,87 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
STRGEN = !!STRGEN!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
LANG_DIR = !!LANG_DIR!!
|
||||
BIN_DIR = !!BIN_DIR!!
|
||||
LANGS_SRC = $(shell ls $(LANG_DIR)/*.txt)
|
||||
LANGS = $(LANGS_SRC:$(LANG_DIR)/%.txt=%.lng)
|
||||
CXX_BUILD = !!CXX_BUILD!!
|
||||
CFLAGS_BUILD = !!CFLAGS_BUILD!!
|
||||
CXXFLAGS_BUILD= !!CXXFLAGS_BUILD!!
|
||||
LDFLAGS_BUILD = !!LDFLAGS_BUILD!!
|
||||
STRGEN_FLAGS = !!STRGEN_FLAGS!!
|
||||
STAGE = !!STAGE!!
|
||||
LANG_SUPPRESS = !!LANG_SUPPRESS!!
|
||||
LANG_OBJS_DIR = !!LANG_OBJS_DIR!!
|
||||
|
||||
ifeq ($(LANG_SUPPRESS), yes)
|
||||
LANG_ERRORS = >/dev/null 2>&1
|
||||
endif
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
RES := $(shell mkdir -p $(BIN_DIR)/lang )
|
||||
|
||||
all: table/strings.h $(LANGS)
|
||||
|
||||
strgen_base.o: $(SRC_DIR)/strgen/strgen_base.cpp $(SRC_DIR)/strgen/strgen.h $(SRC_DIR)/table/control_codes.h $(SRC_DIR)/table/strgen_tables.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
strgen.o: $(SRC_DIR)/strgen/strgen.cpp $(SRC_DIR)/strgen/strgen.h $(SRC_DIR)/table/control_codes.h $(SRC_DIR)/table/strgen_tables.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
string.o: $(SRC_DIR)/string.cpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
alloc_func.o: $(SRC_DIR)/core/alloc_func.cpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
getoptdata.o: $(SRC_DIR)/misc/getoptdata.cpp $(SRC_DIR)/misc/getoptdata.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/misc/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
lang/english.txt: $(LANG_DIR)/english.txt
|
||||
$(Q)mkdir -p lang
|
||||
$(Q)cp $(LANG_DIR)/english.txt lang/english.txt
|
||||
|
||||
$(STRGEN): alloc_func.o string.o strgen_base.o strgen.o getoptdata.o
|
||||
$(E) '$(STAGE) Compiling and Linking $@'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) $(LDFLAGS_BUILD) $^ -o $@
|
||||
|
||||
table/strings.h: lang/english.txt $(STRGEN)
|
||||
$(E) '$(STAGE) Generating $@'
|
||||
@mkdir -p table
|
||||
$(Q)./$(STRGEN) -s $(LANG_DIR) -d table
|
||||
|
||||
$(LANGS): %.lng: $(LANG_DIR)/%.txt $(STRGEN) lang/english.txt
|
||||
$(E) '$(STAGE) Compiling language $(*F)'
|
||||
$(Q)./$(STRGEN) $(STRGEN_FLAGS) -s $(LANG_DIR) -d $(LANG_OBJS_DIR) $< $(LANG_ERRORS) && cp $@ $(BIN_DIR)/lang || true # Do not fail all languages when one fails
|
||||
|
||||
depend:
|
||||
|
||||
clean:
|
||||
$(E) '$(STAGE) Cleaning up language files'
|
||||
$(Q)rm -f strgen_base.o strgen.o string.o alloc_func.o getoptdata.o table/strings.h $(STRGEN) $(LANGS) $(LANGS:%=$(BIN_DIR)/lang/%) lang/english.*
|
||||
|
||||
mrproper: clean
|
||||
$(Q)rm -rf $(BIN_DIR)/lang
|
||||
|
||||
%.lng:
|
||||
@echo '$(STAGE) No such language: $(@:%.lng=%)'
|
||||
|
||||
.PHONY: all mrproper depend clean
|
45
Makefile.msvc
Normal file
45
Makefile.msvc
Normal file
@@ -0,0 +1,45 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#
|
||||
# Makefile for creating bundles of MSVC's binaries in the same way as we make
|
||||
# the zip bundles for ALL other OSes.
|
||||
#
|
||||
# Usage: make -f Makefile.msvc PLATFORM=[Win32|x64] BUNDLE_NAME=openttd-<version>-win[32|64]
|
||||
# or make -f Makefile.msvc PLATFORM=[Win32|x64] BUNDLE_NAME=OTTD-win[32|64]-nightly-<revision>
|
||||
#
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
else
|
||||
Q = @
|
||||
endif
|
||||
|
||||
AWK = "awk"
|
||||
ROOT_DIR := $(shell pwd)
|
||||
BIN_DIR = "$(ROOT_DIR)/bin"
|
||||
SRC_DIR = "$(ROOT_DIR)/src"
|
||||
BUNDLE_DIR = "$(ROOT_DIR)/bundle"
|
||||
BUNDLES_DIR = "$(ROOT_DIR)/bundles"
|
||||
TTD = openttd.exe
|
||||
PDB = openttd.pdb
|
||||
MODE = Release
|
||||
TARGET := $(shell echo $(PLATFORM) | sed "s@win64@x64@;s@win32@Win32@")
|
||||
|
||||
all:
|
||||
$(Q)cp objs/$(TARGET)/$(MODE)/$(TTD) $(BIN_DIR)/$(TTD)
|
||||
|
||||
include Makefile.bundle.in
|
||||
|
||||
bundle_pdb:
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).pdb.xz'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)cp objs/$(TARGET)/Release/$(PDB) $(BUNDLES_DIR)/$(BUNDLE_NAME).pdb
|
||||
$(Q)xz -9 $(BUNDLES_DIR)/$(BUNDLE_NAME).pdb
|
||||
|
||||
regression: all
|
||||
$(Q)cp bin/$(TTD) bin/openttd
|
||||
$(Q)cd bin && sh ai/regression/run.sh
|
Makefile.setting.in (Normal file, 63 lines)
@@ -0,0 +1,63 @@
# This file is part of OpenTTD.
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.

SETTINGSGEN = !!SETTINGSGEN!!
SRC_DIR = !!SRC_DIR!!
CXX_BUILD = !!CXX_BUILD!!
CFLAGS_BUILD = !!CFLAGS_BUILD!!
CXXFLAGS_BUILD = !!CXXFLAGS_BUILD!!
LDFLAGS_BUILD = !!LDFLAGS_BUILD!!
STAGE = !!STAGE!!
SETTING_OBJS_DIR = !!SETTING_OBJS_DIR!!

# Check if we want to show what we are doing
ifdef VERBOSE
Q =
E = @true
else
Q = @
E = @echo
endif

all: table/settings.h

settingsgen.o: $(SRC_DIR)/settingsgen/settingsgen.cpp $(SRC_DIR)/string_func.h $(SRC_DIR)/strings_type.h $(SRC_DIR)/misc/getoptdata.h $(SRC_DIR)/ini_type.h $(SRC_DIR)/core/smallvec_type.hpp $(SRC_DIR)/safeguards.h
	$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
	$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<

alloc_func.o: $(SRC_DIR)/core/alloc_func.cpp $(SRC_DIR)/safeguards.h
	$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
	$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<

getoptdata.o: $(SRC_DIR)/misc/getoptdata.cpp $(SRC_DIR)/misc/getoptdata.h $(SRC_DIR)/safeguards.h
	$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/misc/%.cpp=%.cpp)'
	$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<

string.o: $(SRC_DIR)/string.cpp $(SRC_DIR)/safeguards.h
	$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
	$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<

ini_load.o: $(SRC_DIR)/ini_load.cpp $(SRC_DIR)/core/alloc_func.hpp $(SRC_DIR)/core/mem_func.hpp $(SRC_DIR)/ini_type.h $(SRC_DIR)/string_func.h $(SRC_DIR)/safeguards.h
	$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
	$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<

$(SETTINGSGEN): alloc_func.o string.o ini_load.o settingsgen.o getoptdata.o
	$(E) '$(STAGE) Compiling and Linking $@'
	$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) $(LDFLAGS_BUILD) $^ -o $@

table/settings.h: $(SETTINGSGEN) $(SRC_DIR)/table/settings.h.preamble $(SRC_DIR)/table/settings.h.postamble $(SRC_DIR)/table/*.ini
	$(E) '$(STAGE) Generating $@'
	@mkdir -p table
	$(Q)./$(SETTINGSGEN) -o table/settings.h -b $(SRC_DIR)/table/settings.h.preamble -a $(SRC_DIR)/table/settings.h.postamble $(SRC_DIR)/table/*.ini

depend:

clean:
	$(E) '$(STAGE) Cleaning up settings files'
	$(Q)rm -f settingsgen.o alloc_func.o getoptdata.o string.o ini_load.o $(SETTINGSGEN) table/settings.h

mrproper: clean

.PHONY: all mrproper depend clean
295
Makefile.src.in
Normal file
295
Makefile.src.in
Normal file
@@ -0,0 +1,295 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
CC_HOST = !!CC_HOST!!
|
||||
CXX_HOST = !!CXX_HOST!!
|
||||
CC_BUILD = !!CC_BUILD!!
|
||||
CXX_BUILD = !!CXX_BUILD!!
|
||||
WINDRES = !!WINDRES!!
|
||||
STRIP = !!STRIP!!
|
||||
CFLAGS = !!CFLAGS!!
|
||||
CFLAGS_BUILD = !!CFLAGS_BUILD!!
|
||||
CXXFLAGS = !!CXXFLAGS!!
|
||||
CXXFLAGS_BUILD = !!CXXFLAGS_BUILD!!
|
||||
LIBS = !!LIBS!!
|
||||
LDFLAGS = !!LDFLAGS!!
|
||||
LDFLAGS_BUILD = !!LDFLAGS_BUILD!!
|
||||
ROOT_DIR = !!ROOT_DIR!!
|
||||
BIN_DIR = !!BIN_DIR!!
|
||||
LANG_DIR = !!LANG_DIR!!
|
||||
SRC_OBJS_DIR = !!SRC_OBJS_DIR!!
|
||||
LANG_OBJS_DIR = !!LANG_OBJS_DIR!!
|
||||
SETTING_OBJS_DIR= !!SETTING_OBJS_DIR!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
SCRIPT_SRC_DIR = !!SCRIPT_SRC_DIR!!
|
||||
MEDIA_DIR = !!MEDIA_DIR!!
|
||||
TTD = !!TTD!!
|
||||
STRGEN = !!STRGEN!!
|
||||
DEPEND = !!DEPEND!!
|
||||
OS = !!OS!!
|
||||
STAGE = !!STAGE!!
|
||||
MAKEDEPEND = !!MAKEDEPEND!!
|
||||
CFLAGS_MAKEDEP = !!CFLAGS_MAKEDEP!!
|
||||
SORT = !!SORT!!
|
||||
AWK = !!AWK!!
|
||||
CONFIG_CACHE_COMPILER = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_COMPILER!!
|
||||
CONFIG_CACHE_LINKER = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_LINKER!!
|
||||
CONFIG_CACHE_SOURCE = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_SOURCE!!
|
||||
CONFIG_CACHE_VERSION = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_VERSION!!
|
||||
|
||||
OBJS_C := !!OBJS_C!!
|
||||
OBJS_CPP := !!OBJS_CPP!!
|
||||
OBJS_MM := !!OBJS_MM!!
|
||||
OBJS_RC := !!OBJS_RC!!
|
||||
OBJS := $(OBJS_C) $(OBJS_CPP) $(OBJS_MM) $(OBJS_RC)
|
||||
SRCS := !!SRCS!!
|
||||
|
||||
# All C-files depend on those 3 files
|
||||
FILE_DEP := $(CONFIG_CACHE_COMPILER)
|
||||
# Create all dirs and subdirs
|
||||
RES := $(shell mkdir -p $(BIN_DIR) $(sort $(dir $(OBJS))))
|
||||
|
||||
CFLAGS += -I $(SRC_OBJS_DIR) -I $(LANG_OBJS_DIR) -I $(SETTING_OBJS_DIR)
|
||||
CFLAGS_MAKEDEP += -I $(SRC_OBJS_DIR) -I $(LANG_OBJS_DIR) -I $(SETTING_OBJS_DIR)
|
||||
ifdef SCRIPT_SRC_DIR
|
||||
CFLAGS_MAKEDEP += -I $(SCRIPT_SRC_DIR)
|
||||
endif
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
# Our default target
|
||||
all: $(BIN_DIR)/$(TTD)
|
||||
|
||||
# This are 2 rules that are pointing back to STRGEN stuff.
|
||||
# There is not really a need to have them here, but in case
|
||||
# some weirdo wants to run 'make' in the 'src' dir and expects
|
||||
# the languages to be recompiled, this catches that case and
|
||||
# takes care of it nicely.
|
||||
$(LANG_OBJS_DIR)/$(STRGEN):
|
||||
$(MAKE) -C $(LANG_OBJS_DIR) $(STRGEN)
|
||||
|
||||
$(LANG_OBJS_DIR)/table/strings.h: $(LANG_DIR)/english.txt $(LANG_OBJS_DIR)/$(STRGEN)
|
||||
$(MAKE) -C $(LANG_OBJS_DIR) table/strings.h
|
||||
|
||||
# Always run version detection, so we always have an accurate modified
|
||||
# flag
|
||||
VERSIONS := $(shell AWK="$(AWK)" "$(ROOT_DIR)/findversion.sh")
|
||||
MODIFIED := $(shell echo "$(VERSIONS)" | cut -f 3 -d' ')
|
||||
|
||||
# Use autodetected revisions
|
||||
VERSION := $(shell echo "$(VERSIONS)" | cut -f 1 -d' ')
|
||||
ISODATE := $(shell echo "$(VERSIONS)" | cut -f 2 -d' ')
|
||||
GITHASH := $(shell echo "$(VERSIONS)" | cut -f 4 -d' ')
|
||||
ISTAG := $(shell echo "$(VERSIONS)" | cut -f 5 -d' ')
|
||||
ISSTABLETAG := $(shell echo "$(VERSIONS)" | cut -f 6 -d' ')
|
||||
YEAR := $(shell echo "$(VERSIONS)" | cut -f 7 -d' ')
|
||||
|
||||
# Make sure we have something in VERSION and ISODATE
|
||||
ifeq ($(VERSION),)
|
||||
VERSION := norev000
|
||||
endif
|
||||
ifeq ($(ISODATE),)
|
||||
ISODATE := 00000000
|
||||
endif
|
||||
|
||||
# This helps to recompile if flags change
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_COMPILER) 2>/dev/null`" != "$(CFLAGS) $(CXXFLAGS)" ]; then echo "$(CFLAGS) $(CXXFLAGS)" > $(CONFIG_CACHE_COMPILER); fi )
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_LINKER) 2>/dev/null`" != "$(LDFLAGS) $(LIBS)" ]; then echo "$(LDFLAGS) $(LIBS)" > $(CONFIG_CACHE_LINKER); fi )
|
||||
|
||||
# If there is a change in the source-file-list, make sure we recheck the deps
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_SOURCE) 2>/dev/null`" != "$(SRCS)" ]; then echo "$(SRCS)" > $(CONFIG_CACHE_SOURCE); fi )
|
||||
# If there is a change in the revision, make sure we recompile rev.cpp
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_VERSION) 2>/dev/null`" != "$(VERSION) $(MODIFIED)" ]; then echo "$(VERSION) $(MODIFIED)" > $(CONFIG_CACHE_VERSION); fi )
|
||||
|
||||
ifndef MAKEDEPEND
|
||||
# The slow, but always correct, dep-check
|
||||
DEP_MASK := %.d
|
||||
DEPS := $(OBJS:%.o=%.d)
|
||||
|
||||
# Only include the deps if we are compiling everything
|
||||
ifeq ($(filter %.o clean mrproper, $(MAKECMDGOALS)),)
|
||||
-include $(DEPS)
|
||||
else
|
||||
# In case we want to compile a single target, include the .d file for it
|
||||
ifneq ($(filter %.o, $(MAKECMDGOALS)),)
|
||||
SINGLE_DEP := $(filter %.o, $(MAKECMDGOALS))
|
||||
-include $(SINGLE_DEP:%.o=%.d)
|
||||
endif
|
||||
endif
|
||||
|
||||
# Find the deps via GCC. Rarely wrong, but a bit slow
|
||||
|
||||
$(OBJS_C:%.o=%.d): %.d: $(SRC_DIR)/%.c $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.c=%.c)'
|
||||
$(Q)$(CC_HOST) $(CFLAGS) -MM $< | sed 's@^$(@F:%.d=%.o):@$@ $(@:%.d=%.o):@' > $@
|
||||
|
||||
$(OBJS_CPP:%.o=%.d): %.d: $(SRC_DIR)/%.cpp $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -MM $< | sed 's@^$(@F:%.d=%.o):@$@ $(@:%.d=%.o):@' > $@
|
||||
|
||||
$(OBJS_MM:%.o=%.d): %.d: $(SRC_DIR)/%.mm $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.mm=%.mm)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -MM $< | sed 's@^$(@F:%.d=%.o):@$@ $(@:%.d=%.o):@' > $@
|
||||
|
||||
$(OBJS_RC:%.o=%.d): %.d: $(SRC_DIR)/%.rc $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.rc=%.rc)'
|
||||
$(Q)touch $@
|
||||
|
||||
else
|
||||
# The much faster, but can be wrong, dep-check
|
||||
DEP_MASK :=
|
||||
DEPS := Makefile.dep
|
||||
|
||||
# Only include the deps if we are not cleaning
|
||||
ifeq ($(filter depend clean mrproper, $(MAKECMDGOALS)),)
|
||||
-include Makefile.dep
|
||||
endif
|
||||
|
||||
ifeq ("$(SRC_OBJS_DIR)/$(DEPEND)","$(MAKEDEPEND)")
|
||||
DEP := $(MAKEDEPEND)
|
||||
$(SRC_OBJS_DIR)/$(DEPEND): $(SRC_DIR)/depend/depend.cpp
|
||||
$(E) '$(STAGE) Compiling and linking $(DEPEND)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) $(LDFLAGS_BUILD) -o $@ $<
|
||||
endif
|
||||
|
||||
# Macro for invoking a command on groups of 100 words at a time
|
||||
# (analogous to xargs(1)). The macro invokes itself recursively
|
||||
# until the list of words is depleted.
|
||||
#
|
||||
# Usage: $(call xargs,COMMAND,LIST)
|
||||
#
|
||||
# COMMAND should be a shell command to which the words will be
|
||||
# appended as arguments in groups of 100.
|
||||
define xargs
|
||||
$(1) $(wordlist 1,100,$(2))
|
||||
$(if $(word 101,$(2)),$(call xargs,$(1),$(wordlist 101,$(words $(2)),$(2))))
|
||||
endef
|
||||
|
||||
# Make sure that only 'make depend' ALWAYS triggers a recheck
|
||||
ifeq ($(filter depend, $(MAKECMDGOALS)),)
|
||||
Makefile.dep: $(FILE_DEP) $(SRCS:%=$(SRC_DIR)/%) $(CONFIG_CACHE_SOURCE) $(DEP)
|
||||
else
|
||||
Makefile.dep: $(FILE_DEP) $(SRCS:%=$(SRC_DIR)/%) $(DEP) FORCE
|
||||
endif
|
||||
$(E) '$(STAGE) DEP CHECK (all files)'
|
||||
$(Q)rm -f Makefile.dep.tmp
|
||||
$(Q)touch Makefile.dep.tmp
|
||||
|
||||
# Calculate the deps via makedepend
|
||||
$(call xargs,$(Q)$(MAKEDEPEND) -f$(SRC_OBJS_DIR)/Makefile.dep.tmp -o.o -Y -v -a -- $(CFLAGS_MAKEDEP) -- 2>/dev/null,$(SRCS:%=$(SRC_DIR)/%))
|
||||
|
||||
# Remove all comments and includes that don't start with $(SRC_DIR)
|
||||
# Remove $(SRC_DIR) from object-file-name
|
||||
@$(AWK) ' \
|
||||
/^# DO NOT/ { print $$0 ; next} \
|
||||
/^#/ {next} \
|
||||
/: / { \
|
||||
left = NF - 1; \
|
||||
for (n = 2; n <= NF; n++) { \
|
||||
if (match($$n, "^$(ROOT_DIR)") == 0) { \
|
||||
$$n = ""; \
|
||||
left--; \
|
||||
} \
|
||||
} \
|
||||
gsub("$(SRC_DIR)/", "", $$1); \
|
||||
if (left > 0) { \
|
||||
print $$0; \
|
||||
$$1 = "Makefile.dep:"; \
|
||||
print $$0; \
|
||||
} \
|
||||
next \
|
||||
} \
|
||||
{ \
|
||||
print $$0 \
|
||||
} \
|
||||
' < Makefile.dep.tmp | sed 's@ *@ @g;s@ $$@@' | LC_ALL=C $(SORT) > Makefile.dep
|
||||
|
||||
$(Q)rm -f Makefile.dep.tmp Makefile.dep.tmp.bak
|
||||
|
||||
endif
|
||||
|
||||
# Avoid problems with deps if a .h/.hpp/.hpp.sq file is deleted without the deps
|
||||
# being updated. Now the Makefile continues, the deps are recreated
|
||||
# and all will be fine.
|
||||
%.h %.hpp %.hpp.sq:
|
||||
@true
|
||||
|
||||
|
||||
# Compile all the files according to the targets
|
||||
|
||||
$(OBJS_C): %.o: $(SRC_DIR)/%.c $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.c=%.c)'
|
||||
$(Q)$(CC_HOST) $(CFLAGS) -c -o $@ $<
|
||||
|
||||
$(filter-out %sse2.o, $(filter-out %ssse3.o, $(filter-out %sse4.o, $(OBJS_CPP)))): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -o $@ $<
|
||||
|
||||
$(filter %sse2.o, $(OBJS_CPP)): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -msse2 -o $@ $<
|
||||
|
||||
$(filter %ssse3.o, $(OBJS_CPP)): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -mssse3 -o $@ $<
|
||||
|
||||
$(filter %sse4.o, $(OBJS_CPP)): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -msse4.1 -o $@ $<
|
||||
|
||||
$(OBJS_MM): %.o: $(SRC_DIR)/%.mm $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.mm=%.mm)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -o $@ $<
|
||||
|
||||
$(OBJS_RC): %.o: $(SRC_DIR)/%.rc $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling resource $(<:$(SRC_DIR)/%.rc=%.rc)'
|
||||
$(Q)$(WINDRES) -o $@ $<
|
||||
|
||||
$(BIN_DIR)/$(TTD): $(TTD)
|
||||
$(Q)cp $(TTD) $(BIN_DIR)/$(TTD)
|
||||
ifeq ($(OS), UNIX)
|
||||
$(Q)cp $(MEDIA_DIR)/openttd.32.bmp $(BIN_DIR)/baseset/
|
||||
endif
|
||||
ifeq ($(OS), OSX)
|
||||
$(Q)cp $(ROOT_DIR)/os/macosx/splash.png $(BIN_DIR)/baseset/
|
||||
endif
|
||||
|
||||
$(TTD): $(OBJS) $(CONFIG_CACHE_LINKER)
|
||||
$(E) '$(STAGE) Linking $@'
|
||||
$(Q)+$(CXX_HOST) $(LDFLAGS) $(OBJS) $(LIBS) -o $@
|
||||
ifdef STRIP
|
||||
$(Q)$(STRIP) $@
|
||||
endif
|
||||
|
||||
# Revision files
|
||||
|
||||
$(SRC_DIR)/rev.cpp: $(CONFIG_CACHE_VERSION) $(SRC_DIR)/rev.cpp.in
|
||||
$(Q)cat $(SRC_DIR)/rev.cpp.in | sed "s@\!\!ISODATE\!\!@$(ISODATE)@g;s@!!VERSION!!@$(VERSION)@g;s@!!MODIFIED!!@$(MODIFIED)@g;s@!!DATE!!@`date +%d.%m.%y`@g;s@!!GITHASH!!@$(GITHASH)@g;s@!!ISTAG!!@$(ISTAG)@g;s@!!ISSTABLETAG!!@$(ISSTABLETAG)@g;s@!!YEAR!!@$(YEAR)@g" > $(SRC_DIR)/rev.cpp
|
||||
|
||||
$(SRC_DIR)/os/windows/ottdres.rc: $(CONFIG_CACHE_VERSION) $(SRC_DIR)/os/windows/ottdres.rc.in
|
||||
$(Q)cat $(SRC_DIR)/os/windows/ottdres.rc.in | sed "s@\!\!ISODATE\!\!@$(ISODATE)@g;s@!!VERSION!!@$(VERSION)@g;s@!!DATE!!@`date +%d.%m.%y`@g;s@!!GITHASH!!@$(GITHASH)@g;s@!!ISTAG!!@$(ISTAG)@g;s@!!ISSTABLETAG!!@$(ISSTABLETAG)@g;s@!!YEAR!!@$(YEAR)@g" > $(SRC_DIR)/os/windows/ottdres.rc
|
||||
|
||||
FORCE:
|
||||
|
||||
depend: $(DEPS)
|
||||
|
||||
clean:
|
||||
$(E) '$(STAGE) Cleaning up object files'
|
||||
$(Q)rm -f $(DEPS) $(OBJS) $(TTD) $(DEPEND) $(TTD:%=$(BIN_DIR)/%) $(BIN_DIR)/baseset/openttd.32.bmp $(CONFIG_CACHE_COMPILER) $(CONFIG_CACHE_LINKER) $(CONFIG_CACHE_SOURCE)
|
||||
|
||||
mrproper: clean
|
||||
$(Q)rm -f $(SRC_DIR)/rev.cpp $(SRC_DIR)/os/windows/ottdres.rc
|
||||
|
||||
%.o:
|
||||
@echo '$(STAGE) No such source-file: $(@:%.o=%).[c|cpp|mm|rc]'
|
||||
|
||||
.PHONY: all mrproper depend clean FORCE
|
README.md (73 changed lines)
@@ -34,7 +34,7 @@ Both 'stable' and 'nightly' versions are available for download:
- most people should choose the 'stable' version, as this has been more extensively tested
- the 'nightly' version includes the latest changes and features, but may sometimes be less reliable

OpenTTD is also available for free on [Steam](https://store.steampowered.com/app/1536610/OpenTTD/), [GOG.com](https://www.gog.com/game/openttd), and the [Microsoft Store](https://www.microsoft.com/p/openttd-official/9ncjg5rvrr1c). On some platforms OpenTTD will be available via your OS package manager or a similar service.
On some platforms OpenTTD will also be available via your OS package manager or a similar service.


## 1.2) OpenTTD gameplay manual

@@ -46,13 +46,15 @@ OpenTTD has a [community-maintained wiki](https://wiki.openttd.org/), including

OpenTTD has been ported to several platforms and operating systems.

The currently supported platforms are:
The currently working platforms are:

- Linux (SDL (OpenGL and non-OpenGL))
- macOS (universal) (Cocoa)
- Windows (Win32 GDI / OpenGL)

Other platforms may also work (in particular various BSD systems), but we don't actively test or maintain these.
- FreeBSD (SDL)
- Haiku (SDL)
- Linux (SDL)
- macOS (universal) (Cocoa video and sound drivers)
- OpenBSD (SDL)
- OS/2 (SDL)
- Windows (Win32 GDI (faster) or SDL)

### 1.3.1) Legacy support
Platforms, languages and compilers change.
@@ -63,13 +65,13 @@ Please report a bug if you find a save that doesn't load.

## 1.4) Installing and running OpenTTD

OpenTTD is usually straightforward to install, but for more help the wiki [includes an installation guide](https://wiki.openttd.org/en/Manual/Installation).
OpenTTD is usually straightforward to install, but for more help the wiki [includes an installation guide](https://wiki.openttd.org/Installation).

OpenTTD needs some additional graphics and sound files to run.

For some platforms these will be downloaded during the installation process if required.

For some platforms, you will need to refer to [the installation guide](https://wiki.openttd.org/en/Manual/Installation).
For some platforms, you will need to refer to [the installation guide](https://wiki.openttd.org/Installation).


### 1.4.1) Free graphics and sound files
@@ -77,9 +79,9 @@ For some platforms, you will need to refer to [the installation guide](https://w
The free data files, split into OpenGFX for graphics, OpenSFX for sounds and
OpenMSX for music can be found at:

- https://www.openttd.org/downloads/opengfx-releases/latest for OpenGFX
- https://www.openttd.org/downloads/opensfx-releases/latest for OpenSFX
- https://www.openttd.org/downloads/openmsx-releases/latest for OpenMSX
- https://www.openttd.org/download-opengfx for OpenGFX
- https://www.openttd.org/download-opensfx for OpenSFX
- https://www.openttd.org/download-openmsx for OpenMSX

Please follow the readme of these packages about the installation procedure.
The Windows installer can optionally download and install these packages.
@@ -114,8 +116,37 @@ OpenTTD features multiple types of add-on content, which modify gameplay in diff

Most types of add-on content can be downloaded within OpenTTD via the 'Check Online Content' button in the main menu.

Add-on content can also be installed manually, but that's more complicated; the [OpenTTD wiki](https://wiki.openttd.org/) may offer help with that, or the [OpenTTD directory structure guide](./docs/directory_structure.md).
Add-on content can also be installed manually, but that's more complicated; the [OpenTTD wiki](https://wiki.openttd.org/OpenTTD) may offer help with that, or the [OpenTTD directory structure guide](./docs/directory_structure.md).

### 1.5.1) AI opponents

OpenTTD comes without AI opponents, so if you want to play with AIs you have to download them.

The easiest way is via the 'Check Online Content' button in the main menu.

You can select some AIs that you think are compatible with your playing style.

AI help and discussions may also be found in the [AI section of the forum](https://www.tt-forums.net/viewforum.php?f=65).

### 1.5.2) Scenarios and height maps

Scenarios and heightmaps can be added via the 'Check Online Content' button in the main menu.

### 1.5.3) NewGRFs

A wide range of add-content is available as NewGRFs, including vehicles, industries, stations, landscape objects, town names and more.

NewGRFs can be added via the 'Check Online Content' button in the main menu.

See also the wiki [guide to NewGRFs](https://wiki.openttd.org/NewGRF) and [the forum graphics development section](https://www.tt-forums.net/viewforum.php?f=66).

### 1.5.4) Game scripts

Game scripts can provide additional challenges or changes to the standard OpenTTD gameplay, for example setting transport goals, or changing town growth behaviour.

Game scripts can be added via the 'Check Online Content' button in the main menu.

See also the wiki [guide to game scripts](https://wiki.openttd.org/Game_script) and [the forum graphics game script section](https://www.tt-forums.net/viewforum.php?f=65).

### 1.6) OpenTTD directories

@@ -133,15 +164,14 @@ If you want to compile OpenTTD from source, instructions can be found in [COMPIL
'Official' channels

- [OpenTTD website](https://www.openttd.org)
- [OpenTTD official Discord](https://discord.gg/openttd)
- IRC chat using #openttd on irc.oftc.net [more info about our irc channel](https://wiki.openttd.org/en/Development/IRC%20channel)
- [OpenTTD on Github](https://github.com/OpenTTD/) for code repositories and for reporting issues
- IRC chat using #openttd on irc.oftc.net [more info about our irc channel](https://wiki.openttd.org/Irc)
- [OpenTTD on Github](https://github.com/openTTD/) for code repositories and for reporting issues
- [forum.openttd.org](https://forum.openttd.org/) - the primary community forum site for discussing OpenTTD and related games
- [OpenTTD wiki](https://wiki.openttd.org/) community-maintained wiki, including topics like gameplay guide, detailed explanation of some game mechanics, how to use add-on content (mods) and much more

'Unofficial' channels

- the OpenTTD wiki has a [page listing OpenTTD communities](https://wiki.openttd.org/en/Community/Community) including some in languages other than English
- the OpenTTD wiki has a [page listing OpenTTD communities](https://wiki.openttd.org/Community) including some in languages other than English


### 2.1) Contributing to OpenTTD
@@ -174,14 +204,9 @@ See `src/3rdparty/squirrel/COPYRIGHT` for the complete license text.
The md5 implementation in `src/3rdparty/md5` is licensed under the Zlib license.
See the comments in the source files in `src/3rdparty/md5` for the complete license text.

The fmt implementation in `src/3rdparty/fmt` is licensed under the MIT license.
See `src/3rdparty/fmt/LICENSE.rst` for the complete license text.
The implementations of Posix `getaddrinfo` and `getnameinfo` for OS/2 in `src/3rdparty/os2` are distributed partly under the GNU Lesser General Public License 2.1, and partly under the (3-clause) BSD license.
The exact licensing terms can be found in `src/3rdparty/os2/getaddrinfo.c` resp. `src/3rdparty/os2/getnameinfo.c`.

The catch2 implementation in `src/3rdparty/catch2` is licensed under the Boost Software License, Version 1.0.
See `src/3rdparty/catch2/LICENSE.txt` for the complete license text.

The icu scriptrun implementation in `src/3rdparty/icu` is licensed under the Unicode license.
See `src/3rdparty/icu/LICENSE` for the complete license text.

## 4.0 Credits

72
azure-pipelines-ci.yml
Normal file
72
azure-pipelines-ci.yml
Normal file
@@ -0,0 +1,72 @@
|
||||
trigger:
|
||||
- master
|
||||
- release/*
|
||||
pr:
|
||||
- master
|
||||
- release/*
|
||||
|
||||
jobs:
|
||||
- job: windows
|
||||
displayName: 'Windows'
|
||||
pool:
|
||||
vmImage: 'VS2017-Win2016'
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
Win32:
|
||||
BuildPlatform: 'Win32'
|
||||
Win64:
|
||||
BuildPlatform: 'x64'
|
||||
|
||||
steps:
|
||||
- template: azure-pipelines/templates/ci-git-rebase.yml
|
||||
- template: azure-pipelines/templates/windows-dependencies.yml
|
||||
- template: azure-pipelines/templates/ci-opengfx.yml
|
||||
- template: azure-pipelines/templates/windows-build.yml
|
||||
parameters:
|
||||
BuildPlatform: $(BuildPlatform)
|
||||
BuildConfiguration: Debug
|
||||
- script: |
|
||||
call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x86
|
||||
cd projects
|
||||
call regression.bat
|
||||
displayName: 'Test'
|
||||
|
||||
- job: linux
|
||||
displayName: 'Linux'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
linux-amd64-clang-3.9:
|
||||
Tag: 'linux-amd64-clang-3.9'
|
||||
linux-amd64-gcc-6:
|
||||
Tag: 'linux-amd64-gcc-6'
|
||||
linux-i386-gcc-6:
|
||||
Tag: 'linux-i386-gcc-6'
|
||||
|
||||
steps:
|
||||
- template: azure-pipelines/templates/ci-git-rebase.yml
|
||||
# The dockers already have the dependencies installed
|
||||
# The dockers already have OpenGFX installed
|
||||
- template: azure-pipelines/templates/linux-build.yml
|
||||
parameters:
|
||||
Image: compile-farm-ci
|
||||
Tag: $(Tag)
|
||||
|
||||
- job: macos
|
||||
displayName: 'MacOS'
|
||||
pool:
|
||||
vmImage: 'macOS-10.14'
|
||||
|
||||
variables:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.9
|
||||
|
||||
steps:
|
||||
- template: azure-pipelines/templates/ci-git-rebase.yml
|
||||
- template: azure-pipelines/templates/osx-dependencies.yml
|
||||
- template: azure-pipelines/templates/ci-opengfx.yml
|
||||
- template: azure-pipelines/templates/osx-build.yml
|
||||
- script: 'make regression'
|
||||
displayName: 'Test'
|
azure-pipelines-release-stable.yml (Normal file, 10 lines)
@@ -0,0 +1,10 @@
trigger:
  branches:
    include:
    - refs/tags/*
pr: none

jobs:
- template: azure-pipelines/templates/release.yml
  parameters:
    IsStableRelease: true

azure-pipelines-release.yml (Normal file, 7 lines)
@@ -0,0 +1,7 @@
trigger: none
pr: none

jobs:
- template: azure-pipelines/templates/release.yml
  parameters:
    IsStableRelease: false
@@ -4,9 +4,9 @@ tag=$(git name-rev --name-only --tags --no-undefined HEAD 2>/dev/null | sed 's@\

# If we are a tag, show the part of the changelog till (but excluding) the last stable
if [ -n "$tag" ]; then
    grep='^[0-9]\+\.[0-9]\+[^-]'
    grep='^[0-9]\+\.[0-9]\+\.[0-9]\+[^-]'
    next=$(cat changelog.txt | grep '^[0-9]' | awk 'BEGIN { show="false" } // { if (show=="true") print $0; if ($1=="'$tag'") show="true"} ' | grep "$grep" | head -n1 | sed 's/ .*//')
    cat changelog.txt | awk 'BEGIN { show="false" } /^[0-9]+.[0-9]+/ { if ($1=="'$next'") show="false"; if ($1=="'$tag'") show="true";} // { if (show=="true") print $0 }'
    cat changelog.txt | awk 'BEGIN { show="false" } /^[0-9]+.[0-9]+.[0-9]+/ { if ($1=="'$next'") show="false"; if ($1=="'$tag'") show="true";} // { if (show=="true") print $0 }'
    exit 0
fi
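To illustrate the difference between the two patterns in this hunk (the changelog lines below are hypothetical examples, not taken from changelog.txt): the two-component expression also matches a testing release such as 1.10.3-RC1, while the three-component expression only matches a full x.y.z version that is not followed by a hyphen, so RC/beta entries do not cut the changelog excerpt short.

# two-component pattern: matches both lines
echo '1.10.3 (2020-08-01)'     | grep '^[0-9]\+\.[0-9]\+[^-]'
echo '1.10.3-RC1 (2020-07-16)' | grep '^[0-9]\+\.[0-9]\+[^-]'
# three-component pattern: matches only the stable line
echo '1.10.3 (2020-08-01)'     | grep '^[0-9]\+\.[0-9]\+\.[0-9]\+[^-]'
echo '1.10.3-RC1 (2020-07-16)' | grep '^[0-9]\+\.[0-9]\+\.[0-9]\+[^-]'   # exits 1, no output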
azure-pipelines/manifest.sh (Executable file, 87 lines)
@@ -0,0 +1,87 @@
#!/bin/sh

set -ex

if [ -z "$1" ]; then
	echo "Usage: $0 <folder-with-bundles>"
	exit 1
fi

FOLDER=$1

if [ ! -e .version ] || [ ! -e .release_date ]; then
	echo "This script should be executed in the root of an extracted source tarball"
	exit 1
fi

# Find the name based on the version
if [ -e .is_stable ]; then
	isTesting=$(cat .version | grep "RC\|beta" || true)
	if [ -z "${isTesting}" ]; then
		NAME="stable"
	else
		NAME="testing"
	fi
else
	NAME=$(cat .version | cut -d- -f2 | cut -d- -f-2)
fi

# Convert the date to a YAML date
DATE=$(cat .release_date | tr ' ' T | sed 's/TUTC/:00-00:00/')
VERSION=$(cat .version)
BASE="openttd-${VERSION}"

echo "name: ${NAME}" > manifest.yaml
echo "date: ${DATE}" >> manifest.yaml
echo "base: ${BASE}-" >> manifest.yaml

error=""

FILES=
DEV_FILES=
for filename in $(ls ${FOLDER} | grep -v ".txt$\|.md$\|sum$" | sort); do
	case ${filename} in
		*docs* |\
		*source* |\
		*dbg.deb |\
		*pdb.xz )
			DEV_FILES="${DEV_FILES} ${filename}"
			;;

		*)
			FILES="${FILES} ${filename}"
			;;
	esac
done

# output_files key filename...
output_files() {
	if [ "$#" -lt 2 ]; then return; fi
	key=$1
	echo "${key}:" >> manifest.yaml
	shift
	while [ "$#" -gt 0 ]; do
		filename=$1
		if [ ! -e ${FOLDER}/${filename}.md5sum ] || [ ! -e ${FOLDER}/${filename}.sha1sum ] || [ ! -e ${FOLDER}/${filename}.sha256sum ]; then
			echo "ERROR: missing checksum file for ${filename}" 1>&2
			error="y"
			shift
			continue
		fi

		echo "- id: ${filename}" >> manifest.yaml
		echo "  size: $(stat -c"%s" ${FOLDER}/${filename})" >> manifest.yaml
		echo "  md5sum: $(cat ${FOLDER}/${filename}.md5sum | cut -d\  -f1)" >> manifest.yaml
		echo "  sha1sum: $(cat ${FOLDER}/${filename}.sha1sum | cut -d\  -f1)" >> manifest.yaml
		echo "  sha256sum: $(cat ${FOLDER}/${filename}.sha256sum | cut -d\  -f1)" >> manifest.yaml
		shift
	done
}

output_files files ${FILES}
output_files dev_files ${DEV_FILES}

if [ -n "${error}" ]; then
	echo "ERROR: exiting due to earlier errors" 1>&2
	exit 1
fi
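A possible way to exercise this script locally, sketched under the assumption that a bundles/ folder with release artifacts sits inside the extracted source tree (the folder name is illustrative; the checksum commands mirror the release-bundles template later in this change):

# generate the sidecar checksum files the script checks for
cd bundles
for i in $(ls); do
    openssl dgst -r -md5 -hex $i > $i.md5sum
    openssl dgst -r -sha1 -hex $i > $i.sha1sum
    openssl dgst -r -sha256 -hex $i > $i.sha256sum
done
cd ..

# run from the root of the extracted source tarball, where .version and .release_date live
./azure-pipelines/manifest.sh bundles/
cat manifest.yaml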
azure-pipelines/templates/ci-git-rebase.yml (Normal file, 10 lines)
@@ -0,0 +1,10 @@
steps:
# Rebase to target branch for every PR. This means users don't have to
# rebase every time target branch changes. As long as the PR applies cleanly, we
# will validate it.
- bash: |
    git config user.email 'info@openttd.org'
    git config user.name 'OpenTTD CI'
    git rebase origin/${SYSTEM_PULLREQUEST_TARGETBRANCH}
  displayName: 'Rebase to target branch'
  condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))
azure-pipelines/templates/ci-opengfx.yml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
steps:
- bash: |
    set -ex
    cd bin/baseset
    curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip > opengfx-all.zip
    unzip opengfx-all.zip
    rm -f opengfx-all.zip
  displayName: 'Install OpenGFX'
azure-pipelines/templates/linux-build.yml (Normal file, 36 lines)
@@ -0,0 +1,36 @@
parameters:
  Image: ''
  Tag: ''
  ContainerCommand: ''

steps:
# 'envVars' in the 'Docker@1' task is a bit funky. When you want to use a
# variable, you have to quote it. But the quote is also sent directly to
# Docker and ends up in the variable, which you don't want. To work around
# this, we set the correct variable first (which becomes an env-variable), and
# pass that env-variable through to Docker. We cannot use the normal
# 'variables' entry, as we are a template. So that results in this bit of
# Bash code. Not because it is pretty, but it is the only way we found that
# works.
- bash: |
    echo "##vso[task.setvariable variable=TARGET_BRANCH]${SYSTEM_PULLREQUEST_TARGETBRANCH}"
    echo "Target branch is ${SYSTEM_PULLREQUEST_TARGETBRANCH}"
  displayName: "Set target branch"
  condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))

- task: Docker@1
  ${{ if eq(parameters.Image, 'compile-farm') }}:
    displayName: 'Build'
  ${{ if eq(parameters.Image, 'compile-farm-ci') }}:
    displayName: 'Build and test'
  # Run the commit-checker only if it is a Pull Request
  condition: and(succeeded(), or(not(contains(variables['Agent.JobName'], 'commit-checker')), eq(variables['Build.Reason'], 'PullRequest')))
  inputs:
    command: 'Run an image'
    imageName: openttd/${{ parameters.Image }}:${{ parameters.Tag }}
    volumes: '$(Build.SourcesDirectory):$(Build.SourcesDirectory)'
    workingDirectory: '$(Build.SourcesDirectory)'
    containerCommand: ${{ parameters.ContainerCommand }}
    runInBackground: false
    envVars: |
      TARGET_BRANCH
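For reference, an approximate local equivalent of the Docker@1 task above (the image tag is taken from the CI matrix earlier in this change; whether the container needs an explicit command depends on the image's entrypoint, so treat this purely as a sketch):

docker run --rm \
    -v "$(pwd):$(pwd)" -w "$(pwd)" \
    -e TARGET_BRANCH=master \
    openttd/compile-farm-ci:linux-amd64-gcc-6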
azure-pipelines/templates/linux-claim-bundles.yml (Normal file, 5 lines)
@@ -0,0 +1,5 @@
steps:
# Because we run the compile in a docker (under root), we are not owner
# of the 'bundles' folder. Fix that by executing a chown on it.
- bash: sudo chown -R $(id -u):$(id -g) bundles
  displayName: 'Claim bundles folder back'
azure-pipelines/templates/osx-build.yml (Normal file, 5 lines)
@@ -0,0 +1,5 @@
steps:
- script: './configure PKG_CONFIG_PATH=/usr/local/lib/pkgconfig --enable-static'
  displayName: 'Configure'
- script: 'make -j2'
  displayName: 'Build'
azure-pipelines/templates/osx-dependencies.yml (Normal file, 12 lines)
@@ -0,0 +1,12 @@
steps:
- script: |
    set -ex
    HOMEBREW_NO_AUTO_UPDATE=1 brew install pkg-config lzo xz libpng freetype
    # Remove the dynamic libraries of these libraries, to ensure we use
    # the static versions. That is important, as it is unlikely any
    # end-user has these brew libraries installed.
    rm /usr/local/Cellar/lzo/*/lib/*.dylib
    rm /usr/local/Cellar/xz/*/lib/*.dylib
    rm /usr/local/Cellar/libpng/*/lib/*.dylib
    rm /usr/local/Cellar/freetype/*/lib/*.dylib
  displayName: 'Install dependencies'
19
azure-pipelines/templates/release-bundles.yml
Normal file
19
azure-pipelines/templates/release-bundles.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
parameters:
|
||||
CalculateChecksums: true
|
||||
|
||||
steps:
|
||||
- ${{ if eq(parameters.CalculateChecksums, true) }}:
|
||||
- bash: |
|
||||
set -ex
|
||||
cd bundles
|
||||
for i in $(ls); do
|
||||
openssl dgst -r -md5 -hex $i > $i.md5sum
|
||||
openssl dgst -r -sha1 -hex $i > $i.sha1sum
|
||||
openssl dgst -r -sha256 -hex $i > $i.sha256sum
|
||||
done
|
||||
displayName: 'Calculate checksums'
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish bundles'
|
||||
inputs:
|
||||
PathtoPublish: bundles/
|
||||
ArtifactName: bundles
|
20
azure-pipelines/templates/release-fetch-source.yml
Normal file
20
azure-pipelines/templates/release-fetch-source.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Fetch the source tarball as prepared by an earlier job. In there is the
|
||||
# version predefined. This ensures we are all going to compile the same
|
||||
# source with the same version.
|
||||
|
||||
steps:
|
||||
- checkout: none
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: 'Download source'
|
||||
inputs:
|
||||
downloadType: specific
|
||||
itemPattern: 'bundles/openttd-*-source.tar.xz'
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
- bash: tar --xz -xf ../a/bundles/openttd-*-source.tar.xz --strip-components=1
|
||||
displayName: 'Extracting source'
|
||||
- bash: |
|
||||
set -e
|
||||
VERSION=$(cat .version)
|
||||
echo "${VERSION}"
|
||||
echo "##vso[build.updatebuildnumber]${VERSION}"
|
||||
displayName: 'Change BuildNumber to version'
|
20
azure-pipelines/templates/release-manifest.yml
Normal file
20
azure-pipelines/templates/release-manifest.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
parameters:
|
||||
IsStableRelease: false
|
||||
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: 'Download all bundles'
|
||||
inputs:
|
||||
downloadType: specific
|
||||
itemPattern: 'bundles/*'
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- script: |
|
||||
touch .is_stable
|
||||
displayName: 'Mark as stable release'
|
||||
- script: |
|
||||
set -ex
|
||||
./azure-pipelines/manifest.sh ../a/bundles/
|
||||
mkdir -p bundles
|
||||
mv manifest.yaml bundles/
|
||||
displayName: 'Create manifest.yaml'
|
35
azure-pipelines/templates/release-prepare-source.yml
Normal file
35
azure-pipelines/templates/release-prepare-source.yml
Normal file
@@ -0,0 +1,35 @@
|
||||
# Set the revisions, and remove the VCS files.
|
||||
# This ensures everything else picks up on the predefined versions, and not
|
||||
# that because of some build process the version all of a sudden changes.
|
||||
|
||||
steps:
|
||||
- script: |
|
||||
set -ex
|
||||
|
||||
if [ -n "${SYSTEM_PULLREQUEST_PULLREQUESTNUMBER}" ]; then
|
||||
# We are triggered from a GitHub Pull Request
|
||||
git checkout -B pr${SYSTEM_PULLREQUEST_PULLREQUESTNUMBER}
|
||||
elif [ "${BUILD_SOURCEBRANCHNAME}" = "merge" ] || [ "${BUILD_SOURCEBRANCHNAME}" = "head" ]; then
|
||||
# We are manually triggered based on a GitHub Pull Request
|
||||
PULLREQUESTNUMBER=$(echo ${BUILD_SOURCEBRANCH} | cut -d/ -f3)
|
||||
git checkout -B pr${PULLREQUESTNUMBER}
|
||||
else
|
||||
git checkout -B ${BUILD_SOURCEBRANCHNAME}
|
||||
fi
|
||||
|
||||
./findversion.sh > .ottdrev
|
||||
./azure-pipelines/changelog.sh > .changelog
|
||||
TZ='UTC' date +"%Y-%m-%d %H:%M UTC" > .release_date
|
||||
cat .ottdrev | cut -f 1 -d$'\t' > .version
|
||||
echo "Release Date: $(cat .release_date)"
|
||||
echo "Revision: $(cat .ottdrev)"
|
||||
echo "Version: $(cat .version)"
|
||||
displayName: 'Create version files'
|
||||
- script: |
|
||||
set -e
|
||||
VERSION=$(cat .version)
|
||||
echo "${VERSION}"
|
||||
echo "##vso[build.updatebuildnumber]${VERSION}"
|
||||
displayName: 'Change BuildNumber to version'
|
||||
- script: find . -iname .hg -or -iname .git -or -iname .svn | xargs rm -rf
|
||||
displayName: 'Remove VCS information'
|
186
azure-pipelines/templates/release.yml
Normal file
186
azure-pipelines/templates/release.yml
Normal file
@@ -0,0 +1,186 @@
|
||||
parameters:
|
||||
# If this is false, not all targets are triggered. For example:
|
||||
# The NSIS installer for Windows and the creation of debs only work for
|
||||
# releases. Not for any other type of binary. So they are skilled if this
|
||||
# is set to false.
|
||||
IsStableRelease: false
|
||||
|
||||
jobs:
|
||||
- job: source
|
||||
displayName: 'Source'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
|
||||
steps:
|
||||
- template: release-prepare-source.yml
|
||||
- script: |
|
||||
set -ex
|
||||
|
||||
# Rename the folder to openttd-NNN-source
|
||||
mkdir openttd-$(Build.BuildNumber)
|
||||
find . -maxdepth 1 -not -name . -not -name openttd-$(Build.BuildNumber) -exec mv {} openttd-$(Build.BuildNumber)/ \;
|
||||
# Copy back release_date, as it is needed for the template 'release-bundles'
|
||||
cp openttd-$(Build.BuildNumber)/.release_date .release_date
|
||||
|
||||
mkdir bundles
|
||||
tar --xz -cf bundles/openttd-$(Build.BuildNumber)-source.tar.xz openttd-$(Build.BuildNumber)
|
||||
zip -9 -r -q bundles/openttd-$(Build.BuildNumber)-source.zip openttd-$(Build.BuildNumber)
|
||||
displayName: 'Create bundle'
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: meta
|
||||
displayName: 'Metadata'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn: source
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- script: |
|
||||
set -ex
|
||||
|
||||
mkdir -p bundles
|
||||
cp .changelog bundles/changelog.txt
|
||||
cp .release_date bundles/released.txt
|
||||
cp README.md bundles/README.md
|
||||
displayName: 'Copy meta files'
|
||||
- template: release-bundles.yml
|
||||
parameters:
|
||||
CalculateChecksums: false
|
||||
|
||||
- job: docs
|
||||
displayName: 'Docs'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn: source
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: linux-build.yml
|
||||
parameters:
|
||||
Image: compile-farm
|
||||
ContainerCommand: '$(Build.BuildNumber)'
|
||||
Tag: docs
|
||||
- template: linux-claim-bundles.yml
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: windows
|
||||
displayName: 'Windows'
|
||||
pool:
|
||||
vmImage: 'VS2017-Win2016'
|
||||
dependsOn: source
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
Win32:
|
||||
BuildPlatform: 'Win32'
|
||||
BundlePlatform: 'win32'
|
||||
Win64:
|
||||
BuildPlatform: 'x64'
|
||||
BundlePlatform: 'win64'
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: windows-dependencies.yml
|
||||
- template: windows-dependency-zip.yml
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- template: windows-dependency-nsis.yml
|
||||
- template: windows-build.yml
|
||||
parameters:
|
||||
BuildPlatform: $(BuildPlatform)
|
||||
BuildConfiguration: Release
|
||||
- bash: |
|
||||
set -ex
|
||||
make -f Makefile.msvc bundle_pdb bundle_zip PLATFORM=$(BundlePlatform) BUNDLE_NAME=openttd-$(Build.BuildNumber)-windows-$(BundlePlatform)
|
||||
displayName: 'Create bundles'
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- bash: |
|
||||
set -ex
|
||||
# NSIS will be part of the Hosted image in the next update. Till then, we set the PATH ourself
|
||||
export PATH="${PATH}:/c/Program Files (x86)/NSIS"
|
||||
make -f Makefile.msvc bundle_exe PLATFORM=$(BundlePlatform) BUNDLE_NAME=openttd-$(Build.BuildNumber)-windows-$(BundlePlatform)
|
||||
displayName: 'Create installer bundle'
|
||||
- template: release-bundles.yml
|
||||
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- job: linux_stable
|
||||
displayName: 'Linux'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn: source
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
linux-ubuntu-xenial-i386-gcc:
|
||||
Tag: 'linux-ubuntu-xenial-i386-gcc'
|
||||
linux-ubuntu-xenial-amd64-gcc:
|
||||
Tag: 'linux-ubuntu-xenial-amd64-gcc'
|
||||
linux-ubuntu-bionic-i386-gcc:
|
||||
Tag: 'linux-ubuntu-bionic-i386-gcc'
|
||||
linux-ubuntu-bionic-amd64-gcc:
|
||||
Tag: 'linux-ubuntu-bionic-amd64-gcc'
|
||||
linux-ubuntu-focal-amd64-gcc:
|
||||
Tag: 'linux-ubuntu-focal-amd64-gcc'
|
||||
linux-debian-stretch-i386-gcc:
|
||||
Tag: 'linux-debian-stretch-i386-gcc'
|
||||
linux-debian-stretch-amd64-gcc:
|
||||
Tag: 'linux-debian-stretch-amd64-gcc'
|
||||
linux-debian-buster-i386-gcc:
|
||||
Tag: 'linux-debian-buster-i386-gcc'
|
||||
linux-debian-buster-amd64-gcc:
|
||||
Tag: 'linux-debian-buster-amd64-gcc'
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: linux-build.yml
|
||||
parameters:
|
||||
Image: compile-farm
|
||||
ContainerCommand: '$(Build.BuildNumber)'
|
||||
Tag: $(Tag)
|
||||
- template: linux-claim-bundles.yml
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: macos
|
||||
displayName: 'MacOS'
|
||||
pool:
|
||||
vmImage: 'macOS-10.14'
|
||||
dependsOn: source
|
||||
|
||||
variables:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.9
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: osx-dependencies.yml
|
||||
- template: osx-build.yml
|
||||
- script: 'make bundle_zip bundle_dmg BUNDLE_NAME=openttd-$(Build.BuildNumber)-macosx'
|
||||
displayName: 'Create bundles'
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: manifest
|
||||
displayName: 'Manifest'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn:
|
||||
- source
|
||||
- docs
|
||||
- windows
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- linux_stable
|
||||
- macos
|
||||
# "Skipped" is not a status, and is not succeeded. So it seems to be
|
||||
# considered failed. So we trigger if all the earlier jobs are done (which
|
||||
# might be succeeded, failed, or skipped), and run this job. This is not
|
||||
# optimal, but given the rules, it is the only way to get this to work (as
|
||||
# some jobs might be skipped).
|
||||
condition: succeededOrFailed()
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: release-manifest.yml
|
||||
${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
parameters:
|
||||
IsStableRelease: true
|
||||
- template: release-bundles.yml
|
||||
parameters:
|
||||
CalculateChecksums: false
|
azure-pipelines/templates/windows-build.yml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
parameters:
  BuildPlatform: ''

steps:
- task: VSBuild@1
  displayName: 'Build'
  inputs:
    solution: 'projects/openttd_vs141.sln'
    platform: ${{ parameters.BuildPlatform }}
    configuration: ${{ parameters.BuildConfiguration }}
    maximumCpuCount: true
azure-pipelines/templates/windows-dependencies.yml (Normal file, 14 lines)
@@ -0,0 +1,14 @@
steps:
- bash: |
    set -ex

    curl -L https://github.com/OpenTTD/CompileFarm/releases/download/latest/windows-dependencies.zip > windows-dependencies.zip
    unzip windows-dependencies.zip
    rm -f windows-dependencies.zip

    mv windows-dependencies/installed /c/vcpkg/
    rm -rf windows-dependencies
  displayName: 'Install dependencies'
  workingDirectory: $(Build.ArtifactStagingDirectory)
- script: c:\vcpkg\vcpkg.exe integrate install
  displayName: 'Integrate vcpkg'
azure-pipelines/templates/windows-dependency-nsis.yml (Normal file, 26 lines)
@@ -0,0 +1,26 @@
parameters:
  condition: true

steps:
- bash: |
    set -ex

    mkdir nsis-plugin; cd nsis-plugin
    curl -L https://devs.openttd.org/~truebrain/nsis-plugins/Nsis7z.zip > Nsis7z.zip
    unzip Nsis7z.zip
    cp -R Plugins/* "/c/Program Files (x86)/NSIS/Plugins/"
    cd ..; rm -rf nsis-plugin

    mkdir nsis-plugin; cd nsis-plugin
    curl -L https://devs.openttd.org/~truebrain/nsis-plugins/NsisGetVersion.zip > NsisGetVersion.zip
    unzip NsisGetVersion.zip
    cp -R Plugins/* "/c/Program Files (x86)/NSIS/Plugins/x86-ansi/"
    cd ..; rm -rf nsis-plugin

    mkdir nsis-plugin; cd nsis-plugin
    curl -L https://devs.openttd.org/~truebrain/nsis-plugins/NsisFindProc.zip > NsisFindProc.zip
    unzip NsisFindProc.zip
    cp -R *.dll "/c/Program Files (x86)/NSIS/Plugins/x86-ansi/"
    cd ..; rm -rf nsis-plugin
  displayName: 'Install NSIS with the 7z, GetVersion, and FindProc plugins'
  condition: and(succeeded(), ${{ parameters.condition }})
azure-pipelines/templates/windows-dependency-zip.yml (Normal file, 5 lines)
@@ -0,0 +1,5 @@
steps:
- bash: |
    set -ex
    choco install zip
  displayName: 'Install zip'
@@ -1,2 +0,0 @@
add_subdirectory(ai)
add_subdirectory(game)
@@ -1,42 +0,0 @@
set(AI_COMPAT_SOURCE_FILES
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_0.7.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.0.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.1.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.2.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.3.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.4.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.5.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.6.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.7.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.8.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.9.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.10.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.11.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_12.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_13.nut
        ${CMAKE_CURRENT_SOURCE_DIR}/compat_14.nut
)

foreach(AI_COMPAT_SOURCE_FILE IN LISTS AI_COMPAT_SOURCE_FILES)
    string(REPLACE "${CMAKE_SOURCE_DIR}/bin/" "" AI_COMPAT_SOURCE_FILE_NAME "${AI_COMPAT_SOURCE_FILE}")
    string(CONCAT AI_COMPAT_BINARY_FILE "${CMAKE_BINARY_DIR}/" "${AI_COMPAT_SOURCE_FILE_NAME}")

    add_custom_command(OUTPUT ${AI_COMPAT_BINARY_FILE}
            COMMAND ${CMAKE_COMMAND} -E copy
                    ${AI_COMPAT_SOURCE_FILE}
                    ${AI_COMPAT_BINARY_FILE}
            MAIN_DEPENDENCY ${AI_COMPAT_SOURCE_FILE}
            COMMENT "Copying ${AI_COMPAT_SOURCE_FILE_NAME}"
    )

    list(APPEND AI_COMPAT_BINARY_FILES ${AI_COMPAT_BINARY_FILE})
endforeach()

# Create a new target which copies all compat files
add_custom_target(ai_compat_files
        DEPENDS ${AI_COMPAT_BINARY_FILES}
)

add_dependencies(openttd
        ai_compat_files
)
@@ -379,16 +379,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
|
||||
{
|
||||
return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
|
||||
}
|
||||
|
||||
/* 13 really checks RoadType against RoadType */
|
||||
AIRoad._HasRoadType <- AIRoad.HasRoadType;
|
||||
AIRoad.HasRoadType <- function(tile, road_type)
|
||||
{
|
||||
local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
|
||||
foreach (rt, _ in list) {
|
||||
if (AIRoad._HasRoadType(tile, rt)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
@@ -131,16 +131,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
|
||||
{
|
||||
return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
|
||||
}
|
||||
|
||||
/* 13 really checks RoadType against RoadType */
|
||||
AIRoad._HasRoadType <- AIRoad.HasRoadType;
|
||||
AIRoad.HasRoadType <- function(tile, road_type)
|
||||
{
|
||||
local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
|
||||
foreach (rt, _ in list) {
|
||||
if (AIRoad._HasRoadType(tile, rt)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
@@ -68,16 +68,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
|
||||
{
|
||||
return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
|
||||
}
|
||||
|
||||
/* 13 really checks RoadType against RoadType */
|
||||
AIRoad._HasRoadType <- AIRoad.HasRoadType;
|
||||
AIRoad.HasRoadType <- function(tile, road_type)
|
||||
{
|
||||
local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
|
||||
foreach (rt, _ in list) {
|
||||
if (AIRoad._HasRoadType(tile, rt)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
@@ -4,18 +4,3 @@
|
||||
* OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
* See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
AILog.Info("1.10 API compatibility in effect.");
|
||||
|
||||
/* 13 really checks RoadType against RoadType */
|
||||
AIRoad._HasRoadType <- AIRoad.HasRoadType;
|
||||
AIRoad.HasRoadType <- function(tile, road_type)
|
||||
{
|
||||
local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
|
||||
foreach (rt, _ in list) {
|
||||
if (AIRoad._HasRoadType(tile, rt)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
@@ -1,21 +0,0 @@
|
||||
/*
|
||||
* This file is part of OpenTTD.
|
||||
* OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
* OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
* See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
AILog.Info("1.11 API compatibility in effect.");
|
||||
|
||||
/* 13 really checks RoadType against RoadType */
|
||||
AIRoad._HasRoadType <- AIRoad.HasRoadType;
|
||||
AIRoad.HasRoadType <- function(tile, road_type)
|
||||
{
|
||||
local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
|
||||
foreach (rt, _ in list) {
|
||||
if (AIRoad._HasRoadType(tile, rt)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -20,16 +20,3 @@ AIGroup.CreateGroup <- function(vehicle_type)
{
    return AIGroup._CreateGroup(vehicle_type, AIGroup.GROUP_INVALID);
}

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -6,16 +6,3 @@
 */

AILog.Info("1.9 API compatibility in effect.");

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -1,21 +0,0 @@
/*
 * This file is part of OpenTTD.
 * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
 * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
 */

AILog.Info("12 API compatibility in effect.");

/* 13 really checks RoadType against RoadType */
AIRoad._HasRoadType <- AIRoad.HasRoadType;
AIRoad.HasRoadType <- function(tile, road_type)
{
    local list = AIRoadTypeList(AIRoad.GetRoadTramType(road_type));
    foreach (rt, _ in list) {
        if (AIRoad._HasRoadType(tile, rt)) {
            return true;
        }
    }
    return false;
}

@@ -1,8 +0,0 @@
/*
 * This file is part of OpenTTD.
 * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
 * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
 */

AILog.Info("13 API compatibility in effect.");

@@ -1,6 +0,0 @@
/*
 * This file is part of OpenTTD.
 * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
 * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
 */
67  bin/ai/regression/completeness.sh  (Executable file)
@@ -0,0 +1,67 @@
#!/bin/sh

if ! [ -f ai/regression/completeness.sh ]; then
    echo "Make sure you are in the root of OpenTTD before starting this script."
    exit 1
fi

cat ai/regression/tst_*/main.nut | tr ';' '\n' | awk '
/^function/ {
    for (local in locals) {
        delete locals[local]
    }
    if (match($0, "function Regression::Start") || match($0, "function Regression::Stop")) next
    locals["this"] = "AIControllerSquirrel"
}

/local/ {
    gsub(".*local", "local")
    if (match($4, "^AI")) {
        sub("\\(.*", "", $4)
        locals[$2] = $4
    }
}

/Valuate/ {
    gsub(".*Valuate\\(", "")
    gsub("\\).*", "")
    gsub(",.*", "")
    gsub("\\.", "::")
    print $0
}

/\./ {
    for (local in locals) {
        if (match($0, local ".")) {
            fname = substr($0, index($0, local "."))
            sub("\\(.*", "", fname)
            sub("\\.", "::", fname)
            sub(local, locals[local], fname)
            print fname
            if (match(locals[local], "List")) {
                sub(locals[local], "AIAbstractList", fname)
                print fname
            }
        }
    }
    # We want to remove everything before the FIRST occurrence of AI.
    # If we do not remove any other occurrences of AI from the string
    # we will remove everything before the LAST occurrence of AI, so
    # do some little magic to make it work the way we want.
    sub("AI", "AXXXXY")
    gsub("AI", "AXXXXX")
    sub(".*AXXXXY", "AI")
    if (match($0, "^AI") && match($0, ".")) {
        sub("\\(.*", "", $0)
        sub("\\.", "::", $0)
        print $0
    }
}
' | sed 's/ //g' | sort | uniq > tmp.in_regression

grep 'DefSQ.*Method' ../src/script/api/ai/*.hpp.sq | grep -v 'AIError::' | grep -v 'AIAbstractList::Valuate' | grep -v '::GetClassName' | sed 's/^[^,]*, &//g;s/,[^,]*//g' | sort > tmp.in_api

diff -u tmp.in_regression tmp.in_api | grep -v '^+++' | grep '^+' | sed 's/^+//'

rm -f tmp.in_regression tmp.in_api
BIN  bin/ai/regression/empty.sav  (Normal file)
Binary file not shown.
@@ -4,10 +4,9 @@ class Regression extends AIInfo {
    function GetShortName() { return "REGR"; }
    function GetDescription() { return "This runs regression-tests on some commands. On the same map the result should always be the same."; }
    function GetVersion() { return 1; }
    function GetAPIVersion() { return "14"; }
    function GetAPIVersion() { return "1.10"; }
    function GetDate() { return "2007-03-18"; }
    function CreateInstance() { return "Regression"; }
    function UseAsRandomAI() { return false; }
}

RegisterAI(Regression());
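For comparison, the same info.nut boilerplate for a hypothetical AI pinned to the 1.10 API would look like this (a sketch; every name and date below is made up for illustration):

    class ExampleAI extends AIInfo {
        function GetAuthor()      { return "example author"; }
        function GetName()        { return "ExampleAI"; }
        function GetShortName()   { return "XMPL"; }  /* must be exactly four characters */
        function GetDescription() { return "Does nothing; only illustrates the info.nut boilerplate."; }
        function GetVersion()     { return 1; }
        function GetAPIVersion()  { return "1.10"; }
        function GetDate()        { return "2020-01-01"; }
        function CreateInstance() { return "ExampleAI"; }
    }

    RegisterAI(ExampleAI());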
69  bin/ai/regression/run.sh  (Executable file)
@@ -0,0 +1,69 @@
#!/bin/sh

if ! [ -f ai/regression/run.sh ]; then
    echo "Make sure you are in the root of OpenTTD before starting this script."
    exit 1
fi

if [ -f scripts/game_start.scr ]; then
    mv scripts/game_start.scr scripts/game_start.scr.regression
fi

params=""
gdb=""
if [ "$1" != "-r" ]; then
    params="-snull -mnull -vnull:ticks=30000"
fi
if [ "$1" = "-g" ]; then
    gdb="gdb --ex run --args "
fi

if [ -d "ai/regression/tst_$1" ]; then
    tests="ai/regression/tst_$1"
elif [ -d "ai/regression/tst_$2" ]; then
    tests="ai/regression/tst_$2"
else
    tests=ai/regression/tst_*
fi

ret=0
for tst in $tests; do
    echo -n "Running $tst... "

    # Make sure that only one info.nut is present for each test run. Otherwise openttd gets confused.
    cp ai/regression/regression_info.nut $tst/info.nut

    sav=$tst/test.sav
    if ! [ -f $sav ]; then
        sav=ai/regression/empty.sav
    fi

    if [ -n "$gdb" ]; then
        $gdb ./openttd -x -c ai/regression/regression.cfg $params -g $sav
    else
        ./openttd -x -c ai/regression/regression.cfg $params -g $sav -d script=2 -d misc=9 2>&1 | awk '{ gsub("0x(\\(nil\\)|0+)(x0)?", "0x00000000", $0); gsub("^dbg: \\[script\\]", "", $0); gsub("^ ", "ERROR: ", $0); gsub("ERROR: \\[1\\] ", "", $0); gsub("\\[P\\] ", "", $0); print $0; }' | grep -v '^dbg: \[.*\]' > $tst/tmp.regression
    fi

    if [ -z "$gdb" ]; then
        res="`diff -ub $tst/result.txt $tst/tmp.regression`"
        if [ -z "$res" ]; then
            echo "passed!"
        else
            echo "failed! Difference:"
            echo "$res"
            ret=1
        fi
    fi

    rm $tst/info.nut

    if [ "$1" != "-k" ]; then
        rm -f $tst/tmp.regression
    fi
done

if [ -f scripts/game_start.scr.regression ]; then
    mv scripts/game_start.scr.regression scripts/game_start.scr
fi

exit $ret
152  bin/ai/regression/run.vbs  (Normal file)
@@ -0,0 +1,152 @@
Option Explicit

' This file is part of OpenTTD.
' OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
' OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
' See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.

Dim FSO
Set FSO = CreateObject("Scripting.FileSystemObject")

Function GetTestList()
    Dim retests, i, tests, dir
    Set retests = New RegExp
    Set GetTestList = CreateObject("Scripting.Dictionary")

    retests.Pattern = "ai/regression/tst_*"
    retests.Global = True
    For i = 0 To WScript.Arguments.Count - 1
        Dim test
        test = "ai/regression/tst_" & WScript.Arguments.Item(i)
        If FSO.FolderExists(test) Then
            retests.Pattern = test
            Exit For
        End If
    Next

    For Each dir In FSO.GetFolder("ai/regression/").SubFolders
        Dim name
        name = "ai/regression/" & dir.Name
        If retests.Test(name) Then
            GetTestList.Add name, name
        End If
    Next
End Function

Function GetParams()
    GetParams = "-snull -mnull -vnull:ticks=30000"
    If WScript.Arguments.Count = 0 Then Exit Function
    If WScript.Arguments.Item(0) <> "-r" Then Exit Function
    GetParams = ""
End Function

Sub FilterFile(filename)
    Dim lines, filter, file

    Set file = FSO.OpenTextFile(filename, 1)
    If Not file.AtEndOfStream Then
        lines = file.ReadAll
    End If
    file.Close

    Set filter = New RegExp
    filter.Global = True
    filter.Multiline = True
    filter.Pattern = "0x(\(nil\)|0+)(x0)?"
    lines = filter.Replace(lines, "0x00000000")
    filter.Pattern = "^dbg: \[script\]"
    lines = filter.Replace(lines, "")
    filter.Pattern = "^ "
    lines = filter.Replace(lines, "ERROR: ")
    filter.Pattern = "ERROR: \[1\] \[P\] "
    lines = filter.Replace(lines, "")
    filter.Pattern = "^dbg: .*\r\n"
    lines = filter.Replace(lines, "")

    Set file = FSO.OpenTextFile(filename, 2)
    file.Write lines
    file.Close
End Sub

Function CompareFiles(filename1, filename2)
    Dim file, lines1, lines2
    Set file = FSO.OpenTextFile(filename1, 1)
    If Not file.AtEndOfStream Then
        lines1 = file.ReadAll
    End IF
    file.Close
    Set file = FSO.OpenTextFile(filename2, 1)
    If Not file.AtEndOfStream Then
        lines2 = file.ReadAll
    End IF
    file.Close
    CompareFiles = (lines1 = lines2)
End Function

Function RunTest(test, params, ret)
    Dim WshShell, oExec, sav, command
    Set WshShell = CreateObject("WScript.Shell")

    ' Make sure that only one info.nut is present for each test run. Otherwise openttd gets confused.
    FSO.CopyFile "ai/regression/regression_info.nut", test & "/info.nut"

    sav = test & "/test.sav"
    If Not FSO.FileExists(sav) Then
        sav = "ai/regression/empty.sav"
    End If

    command = ".\openttd -x -c ai/regression/regression.cfg " & params & " -g " & sav & " -d script=2 -d misc=9"
    ' 2>&1 must be after >tmp.regression, else stderr is not redirected to the file
    WshShell.Run "cmd /c " & command & " >"& test & "/tmp.regression 2>&1", 0, True

    FilterFile test & "/tmp.regression"

    If CompareFiles(test & "/result.txt", test & "/tmp.regression") Then
        RunTest = "passed!"
    Else
        RunTest = "failed!"
        ret = 1
    End If

    FSO.DeleteFile test & "/info.nut"

    If WScript.Arguments.Count > 0 Then
        If WScript.Arguments.Item(0) = "-k" Then
            Exit Function
        End If
    End If

    FSO.DeleteFile test & "/tmp.regression"
End Function

On Error Resume Next
WScript.StdOut.WriteLine ""
If Err.Number <> 0 Then
    WScript.Echo "This script must be started with cscript."
    WScript.Quit 1
End If
On Error Goto 0

If Not FSO.FileExists("ai/regression/run.vbs") Then
    WScript.Echo "Make sure you are in the root of OpenTTD before starting this script."
    WScript.Quit 1
End If

If FSO.FileExists("scripts/game_start.scr") Then
    FSO.MoveFile "scripts/game_start.scr", "scripts/game_start.scr.regression"
End If

Dim params, test, ret
params = GetParams()
ret = 0

For Each test in GetTestList()
    WScript.StdOut.Write "Running " & test & "... "
    WScript.StdOut.WriteLine RunTest(test, params, ret)
Next

If FSO.FileExists("scripts/game_start.scr.regression") Then
    FSO.MoveFile "scripts/game_start.scr.regression", "scripts/game_start.scr"
End If

WScript.Quit ret
@@ -332,8 +332,7 @@ function Regression::Cargo()
    for (local i = -1; i < 15; i++) {
        print(" Cargo " + i);
        print(" IsValidCargo(): " + AICargo.IsValidCargo(i));
        print(" GetName(): '" + AICargo.GetName(i) + "'");
        print(" GetCargoLabel(): '" + AICargo.GetCargoLabel(i) + "'");
        print(" GetCargoLabel(): '" + AICargo.GetCargoLabel(i)+ "'");
        print(" IsFreight(): " + AICargo.IsFreight(i));
        print(" HasCargoClass(): " + AICargo.HasCargoClass(i, AICargo.CC_PASSENGERS));
        print(" GetTownEffect(): " + AICargo.GetTownEffect(i));
@@ -341,7 +340,6 @@ function Regression::Cargo()
        print(" GetCargoIncome(10, 10): " + AICargo.GetCargoIncome(i, 10, 10));
        print(" GetCargoIncome(100, 10): " + AICargo.GetCargoIncome(i, 100, 10));
        print(" GetCargoIncome(10, 100): " + AICargo.GetCargoIncome(i, 10, 100));
        print(" GetWeight(100): " + AICargo.GetWeight(i, 100));
        print(" GetRoadVehicleTypeForCargo(): " + AIRoad.GetRoadVehicleTypeForCargo(i));
    }
}
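A small sketch of the cargo queries exercised above, outside the test harness (cargo slot 0 is only an example value, not taken from the diff):

    /* Sketch: GetCargoIncome(cargo, distance, days_in_transit), as used by the test. */
    local cargo = 0;
    if (AICargo.IsValidCargo(cargo)) {
        AILog.Info("Label: " + AICargo.GetCargoLabel(cargo));
        AILog.Info("Income per unit over 20 tiles in 5 days: " + AICargo.GetCargoIncome(cargo, 20, 5));
    }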
@@ -428,31 +426,30 @@ function Regression::Company()
    print(" GetCompanyHQ(): " + AICompany.GetCompanyHQ(AICompany.COMPANY_SELF));
    print(" BuildCompanyHQ(): " + AICompany.BuildCompanyHQ(AIMap.GetTileIndex(129, 129)));
    print(" GetCompanyHQ(): " + AICompany.GetCompanyHQ(AICompany.COMPANY_SELF));
    print(" BuildCompanyHQ(): " + AICompany.BuildCompanyHQ(AIMap.GetTileIndex(239, 76)));
    print(" BuildCompanyHQ(): " + AICompany.BuildCompanyHQ(AIMap.GetTileIndex(129, 128)));
    print(" GetLastErrorString(): " + AIError.GetLastErrorString());
    print(" GetAutoRenewStatus(): " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
    print(" SetAutoRenewStatus(true): " + AICompany.SetAutoRenewStatus(true));
    print(" GetAutoRenewStatus(): " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
    print(" SetAutoRenewStatus(true): " + AICompany.SetAutoRenewStatus(true));
    print(" SetAutoRenewStatus(false): " + AICompany.SetAutoRenewStatus(false));
    print(" GetAutoRenewStatus(): " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
    print(" GetAutoRenewMonths(): " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMonths(-12): " + AICompany.SetAutoRenewMonths(-12));
    print(" GetAutoRenewMonths(): " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMonths(-12): " + AICompany.SetAutoRenewMonths(-12));
    print(" SetAutoRenewMonths(6): " + AICompany.SetAutoRenewMonths(6));
    print(" GetAutoRenewMoney(): " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMoney(200000): " + AICompany.SetAutoRenewMoney(200000));
    print(" GetAutoRenewMoney(): " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMoney(200000): " + AICompany.SetAutoRenewMoney(200000));
    print(" SetAutoRenewMoney(100000): " + AICompany.SetAutoRenewMoney(100000));
    print(" GetAutoRenewStatus(); " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
    print(" SetAutoRenewStatus(true); " + AICompany.SetAutoRenewStatus(true));
    print(" GetAutoRenewStatus(); " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
    print(" SetAutoRenewStatus(true); " + AICompany.SetAutoRenewStatus(true));
    print(" SetAutoRenewStatus(false); " + AICompany.SetAutoRenewStatus(false));
    print(" GetAutoRenewMonths(); " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMonths(-12); " + AICompany.SetAutoRenewMonths(-12));
    print(" GetAutoRenewMonths(); " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMonths(-12); " + AICompany.SetAutoRenewMonths(-12));
    print(" SetAutoRenewMonths(6); " + AICompany.SetAutoRenewMonths(6));
    print(" GetAutoRenewMoney(); " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMoney(200000); " + AICompany.SetAutoRenewMoney(200000));
    print(" GetAutoRenewMoney(); " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
    print(" SetAutoRenewMoney(200000); " + AICompany.SetAutoRenewMoney(200000));
    print(" SetAutoRenewMoney(100000); " + AICompany.SetAutoRenewMoney(100000));
    for (local i = -1; i <= AICompany.EARLIEST_QUARTER; i++) {
        print(" Quarter: " + i);
        print(" GetQuarterlyIncome(): " + AICompany.GetQuarterlyIncome(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyExpenses(): " + AICompany.GetQuarterlyExpenses(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyCargoDelivered(): " + AICompany.GetQuarterlyCargoDelivered(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyPerformanceRating(): " + AICompany.GetQuarterlyPerformanceRating(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyCompanyValue(): " + AICompany.GetQuarterlyCompanyValue(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyIncome(); " + AICompany.GetQuarterlyIncome(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyExpenses(); " + AICompany.GetQuarterlyExpenses(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyCargoDelivered(); " + AICompany.GetQuarterlyCargoDelivered(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyPerformanceRating(); " + AICompany.GetQuarterlyPerformanceRating(AICompany.COMPANY_SELF, i));
        print(" GetQuarterlyCompanyValue(); " + AICompany.GetQuarterlyCompanyValue(AICompany.COMPANY_SELF, i));
    }
}
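The auto-renew calls the test toggles above map onto a very small amount of real AI code; a sketch with arbitrary values (not taken from the diff):

    /* Sketch: enable auto-renewal for the running company. */
    AICompany.SetAutoRenewStatus(true);
    AICompany.SetAutoRenewMonths(6);      /* months relative to maximum age at which to renew */
    AICompany.SetAutoRenewMoney(100000);  /* keep at least this much money when renewing */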
@@ -548,8 +545,6 @@ function Regression::Prices()
    print(" BT_DOCK: " + AIMarine.GetBuildCost(AIMarine.BT_DOCK));
    print(" BT_DEPOT: " + AIMarine.GetBuildCost(AIMarine.BT_DEPOT));
    print(" BT_BUOY: " + AIMarine.GetBuildCost(AIMarine.BT_BUOY));
    print(" BT_LOCK: " + AIMarine.GetBuildCost(AIMarine.BT_LOCK));
    print(" BT_CANAL: " + AIMarine.GetBuildCost(AIMarine.BT_CANAL));
    print(" -Tile-");
    print(" BT_FOUNDATION: " + AITile.GetBuildCost(AITile.BT_FOUNDATION));
    print(" BT_TERRAFORM: " + AITile.GetBuildCost(AITile.BT_TERRAFORM));
@@ -559,26 +554,6 @@ function Regression::Prices()
    print(" BT_CLEAR_ROCKY: " + AITile.GetBuildCost(AITile.BT_CLEAR_ROCKY));
    print(" BT_CLEAR_FIELDS: " + AITile.GetBuildCost(AITile.BT_CLEAR_FIELDS));
    print(" BT_CLEAR_HOUSE: " + AITile.GetBuildCost(AITile.BT_CLEAR_HOUSE));
    print(" BT_CLEAR_WATER: " + AITile.GetBuildCost(AITile.BT_CLEAR_WATER));
}

function Regression::Commands()
{
    print("");
    print("--Commands--");

    print(" -Command accounting-");
    local test = AITestMode();
    local costs = AIAccounting();
    AITile.DemolishTile(2834);
    print(" Command cost: " + costs.GetCosts());
    {
        local inner = AIAccounting();
        print(" New inner cost scope: " + costs.GetCosts());
        AITile.DemolishTile(2835);
        print(" Further command cost: " + costs.GetCosts());
    }
    print(" Saved cost of outer scope: " + costs.GetCosts());
}

function cost_callback(old_path, new_tile, new_direction, self) { if (old_path == null) return 0; return old_path.GetCost() + 1; }
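The scoped accounting tested above is easiest to see in a stand-alone sketch (the tile argument is arbitrary, not from the diff):

    /* Sketch: AITestMode turns commands into cost-only dry runs, and AIAccounting
     * sums the cost of every command issued while it is in scope. */
    function EstimateDemolishCost(tile)
    {
        local test = AITestMode();
        local costs = AIAccounting();
        AITile.DemolishTile(tile);
        return costs.GetCosts();
    }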
@@ -942,9 +917,6 @@ function Regression::Marine()

    print(" BuildWaterDepot(): " + AIMarine.BuildWaterDepot(28479, 28480));
    print(" BuildDock(): " + AIMarine.BuildDock(29253, AIStation.STATION_JOIN_ADJACENT));
    print(" BuildBuoy(): " + AIMarine.BuildBuoy(28481));
    print(" BuildLock(): " + AIMarine.BuildLock(28487));
    print(" BuildCanal(): " + AIMarine.BuildCanal(28744));
}

function Regression::Order()
@@ -1043,30 +1015,6 @@ function Regression::Rail()
    print(" IsRailTile(): " + AIRail.IsRailTile(10002));
    print(" BuildRailTrack(): " + AIRail.BuildRailTrack(10002, AIRail.RAILTRACK_NW_SE));
    print(" BuildSignal(): " + AIRail.BuildSignal(10002, 10258, AIRail.SIGNALTYPE_PBS));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10258));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 9746));
    print(" RemoveSignal(): " + AIRail.RemoveSignal(10002, 10258));
    print(" BuildSignal(): " + AIRail.BuildSignal(10002, 9746, AIRail.SIGNALTYPE_ENTRY));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10258));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 9746));
    print(" RemoveSignal(): " + AIRail.RemoveSignal(10002, 9746));
    print(" BuildSignal(): " + AIRail.BuildSignal(10002, 9746, AIRail.SIGNALTYPE_EXIT_TWOWAY));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10258));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 9746));
    print(" RemoveRailTrack(): " + AIRail.RemoveRailTrack(10002, AIRail.RAILTRACK_NW_NE));
    print(" RemoveRailTrack(): " + AIRail.RemoveRailTrack(10002, AIRail.RAILTRACK_NW_SE));
    print(" BuildRailTrack(): " + AIRail.BuildRailTrack(10002, AIRail.RAILTRACK_NW_NE));
    print(" BuildSignal(): " + AIRail.BuildSignal(10002, 10003, AIRail.SIGNALTYPE_PBS));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10003));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10001));
    print(" RemoveSignal(): " + AIRail.RemoveSignal(10002, 10003));
    print(" BuildSignal(): " + AIRail.BuildSignal(10002, 10001, AIRail.SIGNALTYPE_ENTRY));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10003));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10001));
    print(" RemoveSignal(): " + AIRail.RemoveSignal(10002, 10001));
    print(" BuildSignal(): " + AIRail.BuildSignal(10002, 10001, AIRail.SIGNALTYPE_EXIT_TWOWAY));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10003));
    print(" GetSignalType(): " + AIRail.GetSignalType(10002, 10001));
    print(" RemoveRailTrack(): " + AIRail.RemoveRailTrack(10002, AIRail.RAILTRACK_NW_NE));
    print(" RemoveRailTrack(): " + AIRail.RemoveRailTrack(10002, AIRail.RAILTRACK_NW_SE));
    print(" BuildRail(): " + AIRail.BuildRail(10002, 10003, 10006));
@@ -1107,7 +1055,6 @@ function Regression::Rail()
    print(" IsRailTile(): " + AIRail.IsRailTile(33411));
    print(" BuildRailDepot(): " + AIRail.BuildRailDepot(0, 1));
    print(" BuildRailDepot(): " + AIRail.BuildRailDepot(33411, 33411));
    print(" BuildRailDepot(): " + AIRail.BuildRailDepot(33411, 33410));
    print(" BuildRailDepot(): " + AIRail.BuildRailDepot(33411, 33414));
    print(" BuildRailDepot(): " + AIRail.BuildRailDepot(33411, 33412));
    print(" GetRailDepotFrontTile(): " + AIRail.GetRailDepotFrontTile(33411));
@@ -1204,7 +1151,6 @@ function Regression::Road()
    print(" IsRoadTile(): " + AIRoad.IsRoadTile(33411));
    print(" BuildRoadDepot(): " + AIRoad.BuildRoadDepot(0, 1));
    print(" BuildRoadDepot(): " + AIRoad.BuildRoadDepot(33411, 33411));
    print(" BuildRoadDepot(): " + AIRoad.BuildRoadDepot(33411, 33410));
    print(" BuildRoadDepot(): " + AIRoad.BuildRoadDepot(33411, 33414));
    print(" BuildRoadDepot(): " + AIRoad.BuildRoadDepot(33411, 33412));
    print(" HasRoadType(Road): " + AIRoad.HasRoadType(33411, AIRoad.ROADTYPE_ROAD));
@@ -1522,41 +1468,9 @@ function Regression::TileList()
        print(" " + i + " => " + list.GetValue(i));
    }

    list.AddRectangle(0x6F3F, 0x7248);
    list.AddRectangle(54421 - 256 * 2, 256 * 2 + 54421 + 8);
    list.Valuate(AITile.IsWaterTile);
    print(" IsWaterTile(): done");
    print(" Count(): " + list.Count());
    print(" ListDump:");
    for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
        print(" " + i + " => " + list.GetValue(i));
    }

    list.Valuate(AITile.IsSeaTile);
    print(" IsSeaTile(): done");
    print(" Count(): " + list.Count());
    print(" ListDump:");
    for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
        print(" " + i + " => " + list.GetValue(i));
    }

    list.Valuate(AITile.IsRiverTile);
    print(" IsRiverTile() done");
    print(" Count(): " + list.Count());
    print(" ListDump:");
    for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
        print(" " + i + " => " + list.GetValue(i));
    }

    list.Valuate(AIMarine.IsCanalTile);
    print(" IsCanalTile() done");
    print(" Count(): " + list.Count());
    print(" ListDump:");
    for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
        print(" " + i + " => " + list.GetValue(i));
    }

    list.Valuate(AITile.IsCoastTile);
    print(" IsCoastTile() done");
    print(" Water(): done");
    print(" Count(): " + list.Count());
    print(" ListDump:");
    for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
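The Valuate() pattern used throughout this hunk, reduced to a minimal sketch (the rectangle corners are arbitrary map coordinates, not from the diff):

    /* Sketch: Valuate() stores the valuator's result as each item's value;
     * KeepValue() then filters the list on that value. */
    local list = AITileList();
    list.AddRectangle(AIMap.GetTileIndex(10, 10), AIMap.GetTileIndex(20, 20));
    list.Valuate(AITile.IsWaterTile);
    list.KeepValue(1);
    foreach (tile, is_water in list) {
        AILog.Info("Water tile: " + tile);
    }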
@@ -1705,7 +1619,6 @@ function Regression::Vehicle()
    print(" BuildVehicle(): " + AIVehicle.BuildVehicle(33417, 153));
    print(" IsValidVehicle(12): " + AIVehicle.IsValidVehicle(12));
    print(" CloneVehicle(): " + AIVehicle.CloneVehicle(33417, 12, true));
    print(" BuildVehicle(): " + AIVehicle.BuildVehicle(-1, 153));

    local bank_after = AICompany.GetBankBalance(AICompany.COMPANY_SELF);

@@ -1962,7 +1875,6 @@ function Regression::Start()
    /* Do this first as it gains maximum loan (which is faked to quite a lot). */
    this.Company();

    this.Commands();
    this.Airport();
    this.Bridge();
    this.BridgeList();
File diff suppressed because it is too large
@@ -1,9 +1,9 @@
class StationList extends AIController {
class Regression extends AIController {
    function Start();
};


function StationList::StationList()
function Regression::StationList()
{
    local list = AIStationList(AIStation.STATION_BUS_STOP + AIStation.STATION_TRUCK_STOP);

@@ -27,7 +27,7 @@ function StationList::StationList()
    }
};

function StationList::StationList_Cargo()
function Regression::StationList_Cargo()
{
    print("");
    print("--StationList_Cargo--");
@@ -44,7 +44,7 @@ function StationList::StationList_Cargo()
    }
};

function StationList::StationList_CargoPlanned()
function Regression::StationList_CargoPlanned()
{
    print("");
    print("--StationList_CargoPlanned--");
@@ -58,7 +58,7 @@ function StationList::StationList_CargoPlanned()
    }
};

function StationList::StationList_CargoPlannedByFrom()
function Regression::StationList_CargoPlannedByFrom()
{
    print("");
    print("--StationList_CargoPlannedByFrom--");
@@ -68,7 +68,7 @@ function StationList::StationList_CargoPlannedByFrom()
    }
};

function StationList::StationList_CargoPlannedByVia()
function Regression::StationList_CargoPlannedByVia()
{
    print("");
    print("--StationList_CargoPlannedByVia--");
@@ -78,7 +78,7 @@ function StationList::StationList_CargoPlannedByVia()
    }
};

function StationList::StationList_CargoPlannedViaByFrom()
function Regression::StationList_CargoPlannedViaByFrom()
{
    print("");
    print("--StationList_CargoPlannedViaByFrom--");
@@ -88,7 +88,7 @@ function StationList::StationList_CargoPlannedViaByFrom()
    }
};

function StationList::StationList_CargoPlannedFromByVia()
function Regression::StationList_CargoPlannedFromByVia()
{
    print("");
    print("--StationList_CargoPlannedFromByVia--");
@@ -98,7 +98,7 @@ function StationList::StationList_CargoPlannedFromByVia()
    }
};

function StationList::StationList_CargoWaiting()
function Regression::StationList_CargoWaiting()
{
    print("");
    print("--StationList_CargoWaiting--");
@@ -112,7 +112,7 @@ function StationList::StationList_CargoWaiting()
    }
};

function StationList::StationList_CargoWaitingByFrom()
function Regression::StationList_CargoWaitingByFrom()
{
    print("");
    print("--StationList_CargoWaitingByFrom--");
@@ -122,7 +122,7 @@ function StationList::StationList_CargoWaitingByFrom()
    }
};

function StationList::StationList_CargoWaitingByVia()
function Regression::StationList_CargoWaitingByVia()
{
    print("");
    print("--StationList_CargoWaitingByVia--");
@@ -132,7 +132,7 @@ function StationList::StationList_CargoWaitingByVia()
    }
};

function StationList::StationList_CargoWaitingViaByFrom()
function Regression::StationList_CargoWaitingViaByFrom()
{
    print("");
    print("--StationList_CargoWaitingViaByFrom--");
@@ -142,7 +142,7 @@ function StationList::StationList_CargoWaitingViaByFrom()
    }
};

function StationList::StationList_CargoWaitingFromByVia()
function Regression::StationList_CargoWaitingFromByVia()
{
    print("");
    print("--StationList_CargoWaitingFromByVia--");
@@ -152,7 +152,7 @@ function StationList::StationList_CargoWaitingFromByVia()
    }
};

function StationList::StationList_Vehicle()
function Regression::StationList_Vehicle()
{
    local list = AIStationList_Vehicle(12);

@@ -196,7 +196,7 @@ function StationList::StationList_Vehicle()
    }
}

function StationList::Start()
function Regression::Start()
{
    StationList();
    StationList_Cargo();
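A sketch of the list constructor this test relies on (vehicle id 12 is simply the id the regression game happens to use; the log text is illustrative):

    /* Sketch: AIStationList_Vehicle holds every station that appears in the
     * given vehicle's order list. */
    local stations = AIStationList_Vehicle(12);
    foreach (station_id, _ in stations) {
        AILog.Info("Order list contains: " + AIStation.GetName(station_id));
    }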
BIN  bin/ai/regression/tst_stationlist/test.sav  (Normal file)
Binary file not shown.
BIN  bin/baseset/opntitle.dat  (Normal file)
Binary file not shown.
Some files were not shown because too many files have changed in this diff.