Mirror of https://github.com/OpenTTD/OpenTTD.git, synced 2025-08-14 01:59:09 +00:00
Compare commits
78 Commits
SHA1:
430ce20f8d 20007fd1f4 02980119e4 dd4aae830d f4ed770cff 89c8215b79
911f9165cf 2748a90bc9 583a2221ca 86113008ab 31a9f549fb 8203adecb5
3d8e68f966 aca5d97a68 fd8ca95947 9617fa727d 74b591c2e9 78e558717c
cb9c4bf4a0 e39c5829a2 a089c876ab 1072c74bc4 bb251f45fc 21aa339901
3132d29805 94581d352a cf27deb675 40d68273aa 2cc244bde0 7872b1e0b5
22519b3b0d 5b2447e10c e0680c9ede ea895f05eb 37187df7ef 2faa89a98e
4297cc5f21 591ca82845 e90322f6e9 978cc774ec 7f77b8c1e2 2bf936bbbc
eb8d79f41f 0737458ec4 7e659bc3da 612c912144 c8cd5f7f3a 0c87ae1875
cc458c9559 9f0371117b b6d409f8a7 d0613bad11 27cbee76b4 7e3eabdaae
a4d73d74a2 d4bd17d443 a82dca883f 18ad0fa983 fd64c62bcd b542ebfb52
318b51c58a df7f642a02 a0dc9ef847 8b4501604c 4082121511 40b3bc6f1a
b5a8510b9b aa7dbdc42e 2f57d65575 ceae49a1d8 dc2f10bf11 0643a3627f
fd106cf67b 3d5b729290 81ed69d8d4 bd3ba91181 49d2a07f66 887dce4655
@@ -10,7 +10,6 @@ notifications:
  only-by:
    - DorpsGek
  commit-comment:
  discussion:
  pull-request:
  issue:
  tag-created:
.github/ISSUE_TEMPLATE.md (vendored, new file, 7 lines added)
@@ -0,0 +1,7 @@
## Version of OpenTTD

## Expected result

## Actual result

## Steps to reproduce
.github/ISSUE_TEMPLATE/bug.yaml (vendored, 41 lines removed)
@@ -1,41 +0,0 @@
name: Bug Report
description: Found a bug in OpenTTD?
title: "[Bug]: "
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: version
    attributes:
      label: Version of OpenTTD
      description: Fill in below what version of OpenTTD you are using, including your OS.
      placeholder: ex. 1.11.2, Windows 10
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected result
      description: Describe in a few words what you expected to happen.
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual result
      description: Describe in a few words what actually happens.
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: As detailed as possible, please tell us how we can reproduce this. Feel free to attach a savegame (zip it first) to make it more clear.
      placeholder: |
        1. Loaded the attached savegame.
        2. Click on the button left of that other icon.
        3. The window doesn't open.
    validations:
      required: true
.github/ISSUE_TEMPLATE/config.yml (vendored, 5 lines removed)
@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Suggestions and ideas?
    url: https://www.tt-forums.net/viewforum.php?f=32
    about: Have a suggestion or an idea for a cool new feature? Post them on our forum!
.github/ISSUE_TEMPLATE/crash.yaml (vendored, 37 lines removed)
@@ -1,37 +0,0 @@
name: Crash
description: Did OpenTTD crash?
title: "[Crash]: "
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this crash report!
  - type: input
    id: version
    attributes:
      label: Version of OpenTTD
      description: Fill in below what version of OpenTTD you are using, including your OS.
      placeholder: ex. 1.11.2, Windows 10
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: Please spend a few words if you can reproduce this problem.
      placeholder: |
        1. Bought a new train.
        2. The game crashed.
    validations:
      required: true
  - type: textarea
    id: crashlogs
    attributes:
      label: Upload crash files
      description: With the `crash.log`, `crash.dmp`, and `crash.sav` we can analyze the crash in detail; this way you allow us to easier triage and fix the problem.
      placeholder: |
        1. Zip the `crash.log`, `crash.dmp` and `crash.sav`.
        2. Click on this field.
        3. Drag and drop the zip file in here.
    validations:
      required: true
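As a side note, the first step in that placeholder is a one-liner on most systems; a minimal sketch, assuming the three crash files sit in the current directory and the `zip` tool is installed (the archive name is arbitrary):

    # Bundle the crash files referenced above into a single archive for upload
    zip crash-report.zip crash.log crash.dmp crash.sav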
.github/PULL_REQUEST_TEMPLATE.md (vendored, 49 lines removed)
@@ -1,49 +0,0 @@
## Motivation / Problem

<!--
Describe here shortly
* For bug fixes:
  * What problem does this solve?
  * If there is already an issue, link the issue, otherwise describe the problem here.
* For features or gameplay changes:
  * What was the motivation to develop this feature?
  * Does this address any problem with the gameplay or interface?
  * Which group of players do you think would enjoy this feature?
-->


## Description

<!--
Describe here shortly
* For bug fixes:
  * How is the problem solved?
* For features or gameplay changes:
  * What does this feature do?
  * How does it improve/solve the situation described under 'motivation'.
-->


## Limitations

<!--
Describe here
* Is the problem solved in all scenarios?
* Is this feature complete? Are there things that could be added in the future?
* Are there things that are intentionally left out?
* Do you know of a bug or corner case that does not work?
-->


## Checklist for review

Some things are not automated, and forgotten often. This list is a reminder for the reviewers.
* The bug fix is important enough to be backported? (label: 'backport requested')
* This PR touches english.txt or translations? Check the [guidelines](https://github.com/OpenTTD/OpenTTD/blob/master/docs/eints.md)
* This PR affects the save game format? (label 'savegame upgrade')
* This PR affects the GS/AI API? (label 'needs review: Script API')
  * ai_changelog.hpp, gs_changelog.hpp need updating.
  * The compatibility wrappers (compat_*.nut) need updating.
* This PR affects the NewGRF API? (label 'needs review: NewGRF')
  * newgrf_debug_data.h may need updating.
  * [PR must be added to API tracker](https://wiki.openttd.org/en/Development/NewGRF/Specification%20Status)
.github/unused-strings.py (vendored, 221 lines removed)
@@ -1,221 +0,0 @@
"""
Script to scan the OpenTTD source-tree for STR_ entries that are defined but
no longer used.

This is not completely trivial, as OpenTTD references a lot of strings in
relation to another string. The most obvious example of this is a list. OpenTTD
only references the first entry in the list, and does "+ <var>" to get to the
correct string.

There are other ways OpenTTD does use relative values. This script tries to
account for all of them, to give the best approximation we have for "this
string is unused".
"""

import glob
import os
import re
import subprocess
import sys

from enum import Enum

LENGTH_NAME_LOOKUP = {
    "VEHICLE_TYPES": 4,
}


class SkipType(Enum):
    NONE = 1
    LENGTH = 2
    EXTERNAL = 3
    ZERO_IS_SPECIAL = 4
    EXPECT_NEWLINE = 5


def read_language_file(filename, strings_found, errors):
    strings_defined = []

    skip = SkipType.NONE
    length = 0
    common_prefix = ""
    last_tiny_string = ""

    with open(filename) as fp:
        for line in fp.readlines():
            if not line.strip():
                if skip == SkipType.EXPECT_NEWLINE:
                    skip = SkipType.NONE
                continue

            line = line.strip()

            if skip == SkipType.EXPECT_NEWLINE:
                # The only thing allowed after a list, is this next marker, or a newline.
                if line == "###next-name-looks-similar":
                    # "###next-name-looks-similar"
                    # Indicates the common prefix of the last list has a very
                    # similar name to the next entry, but isn't part of the
                    # list. So do not emit a warning about them looking very
                    # similar.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorted than indicated by ###length")

                    common_prefix = ""
                else:
                    errors.append(f"ERROR: expected a newline after a list, but didn't find any around {name}. Did you add an entry to the list without increasing the length?")

                skip = SkipType.NONE

            if line[0] == "#":
                if line.startswith("###length "):
                    # "###length <count>"
                    # Indicates the next few entries are part of a list. Only
                    # the first entry is possibly referenced, and the rest are
                    # indirectly.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorted than indicated by ###length")

                    length = line.split(" ")[1].strip()

                    if length.isnumeric():
                        length = int(length)
                    else:
                        length = LENGTH_NAME_LOOKUP[length]

                    skip = SkipType.LENGTH
                elif line.startswith("###external "):
                    # "###external <count>"
                    # Indicates the next few entries are used outside the
                    # source and will not be referenced.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorted than indicated by ###length")

                    length = line.split(" ")[1].strip()
                    length = int(length)

                    skip = SkipType.EXTERNAL
                elif line.startswith("###setting-zero-is-special"):
                    # "###setting-zero-is-special"
                    # Indicates the next entry is part of the "zero is special"
                    # flag of settings. These entries are not referenced
                    # directly in the code.

                    if length != 0:
                        errors.append(f"ERROR: list around {name} is shorted than indicated by ###length")

                    skip = SkipType.ZERO_IS_SPECIAL

                continue

            name = line.split(":")[0].strip()
            strings_defined.append(name)

            # If a string ends on _TINY or _SMALL, it can be the {TINY} variant.
            # Check for this by some fuzzy matching.
            if name.endswith(("_SMALL", "_TINY")):
                last_tiny_string = name
            elif last_tiny_string:
                matching_name = "_".join(last_tiny_string.split("_")[:-1])
                if name == matching_name:
                    strings_found.add(last_tiny_string)
                else:
                    last_tiny_string = ""

            if skip == SkipType.EXTERNAL:
                strings_found.add(name)
                skip = SkipType.LENGTH

            if skip == SkipType.LENGTH:
                skip = SkipType.NONE
                length -= 1
                common_prefix = name
            elif skip == SkipType.ZERO_IS_SPECIAL:
                strings_found.add(name)
            elif length > 0:
                strings_found.add(name)
                length -= 1

                # Find the common prefix of these strings
                for i in range(len(common_prefix)):
                    if common_prefix[0 : i + 1] != name[0 : i + 1]:
                        common_prefix = common_prefix[0:i]
                        break

                if length == 0:
                    skip = SkipType.EXPECT_NEWLINE

                    if len(common_prefix) < 6:
                        errors.append(f"ERROR: common prefix of block including {name} was reduced to {common_prefix}. This means the names in the list are not consistent.")
            elif common_prefix:
                if name.startswith(common_prefix):
                    errors.append(f"ERROR: {name} looks a lot like block above with prefix {common_prefix}. This mostly means that the list length was too short. Use '###next-name-looks-similar' if it is not.")
                common_prefix = ""

    return strings_defined


def scan_source_files(path, strings_found):
    for new_path in glob.glob(f"{path}/*"):
        if os.path.isdir(new_path):
            scan_source_files(new_path, strings_found)
            continue

        if not new_path.endswith((".c", ".h", ".cpp", ".hpp", ".ini")):
            continue

        # Most files we can just open, but some use magic, that requires the
        # G++ preprocessor before we can make sense out of it.
        if new_path == "src/table/cargo_const.h":
            p = subprocess.run(["g++", "-E", new_path], stdout=subprocess.PIPE)
            output = p.stdout.decode()
        else:
            with open(new_path) as fp:
                output = fp.read()

        # Find all the string references.
        matches = re.findall(r"[^A-Z_](STR_[A-Z0-9_]*)", output)
        strings_found.update(matches)


def main():
    strings_found = set()
    errors = []

    scan_source_files("src", strings_found)
    strings_defined = read_language_file("src/lang/english.txt", strings_found, errors)

    # STR_LAST_STRINGID is special, and not really a string.
    strings_found.remove("STR_LAST_STRINGID")
    # These are mentioned in comments, not really a string.
    strings_found.remove("STR_XXX")
    strings_found.remove("STR_NEWS")
    strings_found.remove("STR_CONTENT_TYPE_")

    # This string is added for completion, but never used.
    strings_defined.remove("STR_JUST_DATE_SHORT")

    strings_defined = sorted(strings_defined)
    strings_found = sorted(list(strings_found))

    for string in strings_found:
        if string not in strings_defined:
            errors.append(f"ERROR: {string} found but never defined.")

    for string in strings_defined:
        if string not in strings_found:
            errors.append(f"ERROR: {string} is (possibly) no longer needed.")

    if errors:
        for error in errors:
            print(error)
        sys.exit(1)
    else:
        print("OK")


if __name__ == "__main__":
    main()
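For reference, the deleted checker was a standalone script; a minimal sketch of how it could be run from an OpenTTD source checkout (assuming Python 3 and, for the src/table/cargo_const.h special case, a g++ binary on PATH):

    # Run from the root of the source tree; prints errors and exits non-zero when it finds problems
    python3 .github/unused-strings.py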
406
.github/workflows/ci-build.yml
vendored
406
.github/workflows/ci-build.yml
vendored
@@ -1,406 +0,0 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
CTEST_OUTPUT_ON_FAILURE: 1
|
||||
|
||||
jobs:
|
||||
emscripten:
|
||||
name: Emscripten
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container:
|
||||
# If you change this version, change the number in the cache step too.
|
||||
image: emscripten/emsdk:2.0.10
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Setup cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: /emsdk/upstream/emscripten/cache
|
||||
key: 2.0.10-${{ runner.os }}
|
||||
|
||||
- name: Build (host tools)
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. -DOPTION_TOOLS_ONLY=ON
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc) --target tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
emcmake cmake .. -DHOST_BINARY_DIR=../build-host
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc)
|
||||
echo "::endgroup::"
|
||||
|
||||
linux:
|
||||
name: Linux
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- compiler: clang
|
||||
cxxcompiler: clang++
|
||||
libsdl: libsdl2-dev
|
||||
- compiler: gcc
|
||||
cxxcompiler: g++
|
||||
libsdl: libsdl2-dev
|
||||
- compiler: gcc
|
||||
cxxcompiler: g++
|
||||
libsdl: libsdl1.2-dev
|
||||
- compiler: gcc
|
||||
cxxcompiler: g++
|
||||
extra-cmake-parameters: -DOPTION_DEDICATED=ON
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
env:
|
||||
CC: ${{ matrix.compiler }}
|
||||
CXX: ${{ matrix.cxxcompiler }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Update apt"
|
||||
sudo apt-get update
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install dependencies"
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
liballegro4-dev \
|
||||
libfontconfig-dev \
|
||||
libicu-dev \
|
||||
liblzma-dev \
|
||||
liblzo2-dev \
|
||||
${{ matrix.libsdl }} \
|
||||
zlib1g-dev \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Get OpenGFX
|
||||
run: |
|
||||
mkdir -p ~/.local/share/openttd/baseset
|
||||
cd ~/.local/share/openttd/baseset
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. ${{ matrix.extra-cmake-parameters }}
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd build
|
||||
ctest -j $(nproc) --timeout 120
|
||||
|
||||
macos:
|
||||
name: Mac OS
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: x64
|
||||
full_arch: x86_64
|
||||
|
||||
runs-on: macos-latest
|
||||
env:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.14
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
run: |
|
||||
echo "::set-output name=image::$ImageOS-$ImageVersion"
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: /usr/local/share/vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}-0 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}
|
||||
|
||||
- name: Prepare vcpkg
|
||||
run: |
|
||||
vcpkg install --triplet=${{ matrix.arch }}-osx \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
zlib \
|
||||
# EOF
|
||||
|
||||
- name: Install OpenGFX
|
||||
run: |
|
||||
mkdir -p ~/Documents/OpenTTD/baseset
|
||||
cd ~/Documents//OpenTTD/baseset
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_OSX_ARCHITECTURES=${{ matrix.full_arch }} \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-osx \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd build
|
||||
ctest -j $(sysctl -n hw.logicalcpu) --timeout 120
|
||||
|
||||
windows:
|
||||
name: Windows
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [windows-latest, windows-2016]
|
||||
arch: [x86, x64]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
shell: powershell
|
||||
run: |
|
||||
# Work around caching failure with GNU tar
|
||||
New-Item -Type Junction -Path vcpkg -Target c:\vcpkg
|
||||
|
||||
Write-Output "::set-output name=image::$env:ImageOS-$env:ImageVersion"
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}-0 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}
|
||||
|
||||
- name: Prepare vcpkg
|
||||
shell: bash
|
||||
run: |
|
||||
vcpkg install --triplet=${{ matrix.arch }}-windows-static \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
zlib \
|
||||
# EOF
|
||||
|
||||
- name: Install OpenGFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
cd "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install MSVC problem matcher
|
||||
uses: ammaraskar/msvc-problem-matcher@master
|
||||
|
||||
- name: Configure developer command prompt for ${{ matrix.arch }}
|
||||
uses: ilammy/msvc-dev-cmd@v1
|
||||
with:
|
||||
arch: ${{ matrix.arch }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. \
|
||||
-GNinja \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-windows-static \
|
||||
-DCMAKE_TOOLCHAIN_FILE="c:\vcpkg\scripts\buildsystems\vcpkg.cmake" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build .
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
ctest --timeout 120
|
||||
|
||||
|
||||
msys2:
|
||||
name: msys2
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- msystem: MINGW64
|
||||
arch: x86_64
|
||||
- msystem: MINGW32
|
||||
arch: i686
|
||||
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Setup MSYS2
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: ${{ matrix.msystem }}
|
||||
release: false
|
||||
install: >-
|
||||
git
|
||||
make
|
||||
mingw-w64-${{ matrix.arch }}-cmake
|
||||
mingw-w64-${{ matrix.arch }}-gcc
|
||||
mingw-w64-${{ matrix.arch }}-lzo2
|
||||
mingw-w64-${{ matrix.arch }}-libpng
|
||||
|
||||
- name: Install OpenGFX
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
cd "C:/Users/Public/Documents/OpenTTD/baseset"
|
||||
|
||||
echo "::group::Download OpenGFX"
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip -o opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Unpack OpenGFX"
|
||||
unzip opengfx-all.zip
|
||||
echo "::endgroup::"
|
||||
|
||||
rm -f opengfx-all.zip
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
shell: msys2 {0}
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake .. -G"MSYS Makefiles"
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Test
|
||||
shell: msys2 {0}
|
||||
run: |
|
||||
cd build
|
||||
ctest -j $(nproc) --timeout 120
|
||||
|
||||
check_annotations:
|
||||
name: Check Annotations
|
||||
needs:
|
||||
- emscripten
|
||||
- linux
|
||||
- macos
|
||||
- windows
|
||||
- msys2
|
||||
|
||||
if: always() && github.event_name == 'pull_request'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Check annotations
|
||||
uses: OpenTTD/actions/annotation-check@v2
|
.github/workflows/commit-checker.yml (vendored, 28 lines changed)
@@ -15,7 +15,31 @@ jobs:
          fetch-depth: 4

      - name: Get pull-request commits
        uses: OpenTTD/actions/checkout-pull-request@v2
        run: |
          set -x
          # actions/checkout did a merge checkout of the pull-request. As such, the first
          # commit is the merge commit. This means that on HEAD^ is the base branch, and
          # on HEAD^2 are the commits from the pull-request. We now check if those trees
          # have a common parent. If not, we fetch a few more commits till we do. In result,
          # the log between HEAD^ and HEAD^2 will be the commits in the pull-request.
          DEPTH=4
          while [ -z "$(git merge-base HEAD^ HEAD^2)" ]; do
            git -c protocol.version=2 fetch --no-tags --prune --progress --no-recurse-submodules --deepen=${DEPTH} origin HEAD
            DEPTH=$(( ${DEPTH} * 4 ))
          done

          # Just to show which commits we are going to evaluate.
          git log --oneline HEAD^..HEAD^2

      - name: Checkout commit-checker
        uses: actions/checkout@v2
        with:
          repository: OpenTTD/OpenTTD-git-hooks
          path: git-hooks
          ref: master

      - name: Check commits
        uses: OpenTTD/OpenTTD-git-hooks@main
        run: |
          set -x
          HOOKS_DIR=./git-hooks/hooks GIT_DIR=.git ./git-hooks/hooks/check-commits.sh HEAD^..HEAD^2
          echo "Commit checks passed"
.github/workflows/preview_build.yml (vendored, 133 lines removed)
@@ -1,133 +0,0 @@
name: Preview build

on:
  repository_dispatch:
    types:
      - Preview*

jobs:
  preview:
    name: Build preview

    runs-on: ubuntu-20.04
    container:
      # If you change this version, change the number in the cache step too.
      image: emscripten/emsdk:2.0.10
      # uid=1001(runner) gid=121(docker)
      options: -u 1001:121

    steps:
      - name: Update deployment status to in progress
        uses: octokit/request-action@v2.x
        with:
          route: POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses
          mediaType: |
            previews:
              - ant-man
              - flash
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          deployment_id: ${{ github.event.client_payload.deployment_id }}
          state: in_progress
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - name: Checkout
        uses: actions/checkout@v2
        with:
          ref: ${{ github.event.client_payload.sha }}

      - name: Name branch
        run: |
          name=$(echo "${{ github.event.client_payload.folder }}")
          git checkout -b ${name}

      - name: Setup cache
        uses: actions/cache@v2
        with:
          path: /emsdk/upstream/emscripten/cache
          key: 2.0.10-${{ runner.os }}

      - name: Build (host tools)
        run: |
          mkdir build-host
          cd build-host

          echo "::group::CMake"
          cmake .. -DOPTION_TOOLS_ONLY=ON
          echo "::endgroup::"

          echo "::group::Build"
          echo "Running on $(nproc) cores"
          make -j$(nproc) tools
          echo "::endgroup::"

      - name: Install GCC problem matcher
        uses: ammaraskar/gcc-problem-matcher@master

      - name: Build
        run: |
          mkdir build
          cd build

          echo "::group::CMake"
          emcmake cmake .. \
            -DHOST_BINARY_DIR=../build-host \
            -DCMAKE_BUILD_TYPE=RelWithDebInfo \
            # EOF
          echo "::endgroup::"

          echo "::group::Build"
          echo "Running on $(nproc) cores"
          emmake make -j$(nproc)
          echo "::endgroup::"

      - name: Publish preview
        run: |
          # setuptools is missing in this Docker image, which breaks installing
          # awscli. So we need to do this in two steps to recover sanity.
          pip3 install setuptools
          pip3 install awscli

          ~/.local/bin/aws s3 cp --only-show-errors build/openttd.data s3://${{ secrets.PREVIEW_S3_BUCKET }}/${{ github.event.client_payload.folder }}/
          ~/.local/bin/aws s3 cp --only-show-errors build/openttd.html s3://${{ secrets.PREVIEW_S3_BUCKET }}/${{ github.event.client_payload.folder }}/
          ~/.local/bin/aws s3 cp --only-show-errors build/openttd.js s3://${{ secrets.PREVIEW_S3_BUCKET }}/${{ github.event.client_payload.folder }}/
          ~/.local/bin/aws s3 cp --only-show-errors build/openttd.wasm s3://${{ secrets.PREVIEW_S3_BUCKET }}/${{ github.event.client_payload.folder }}/

          # Invalidate the cache of the CloudFront distribution
          ~/.local/bin/aws cloudfront create-invalidation --distribution-id ${{ secrets.PREVIEW_CF_DISTRIBUTION_ID }} --paths "/${{ github.event.client_payload.folder }}/*"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

      - name: Update deployment status to success
        uses: octokit/request-action@v2.x
        with:
          route: POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses
          mediaType: |
            previews:
              - ant-man
              - flash
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          deployment_id: ${{ github.event.client_payload.deployment_id }}
          state: success
          environment_url: https://preview.openttd.org/${{ github.event.client_payload.folder }}/
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: failure()
        name: Update deployment status to failure
        uses: octokit/request-action@v2.x
        with:
          route: POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses
          mediaType: |
            previews:
              - ant-man
              - flash
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          deployment_id: ${{ github.event.client_payload.deployment_id }}
          state: failure
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}
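The two-stage Emscripten build in that workflow (native host tools first, then the emcmake/emmake cross build pointed at them) can be reproduced locally; a rough sketch, assuming an activated emsdk environment with `emcmake`/`emmake` on PATH and a checked-out source tree:

    # Stage 1: native tools needed during the cross build
    mkdir build-host && cd build-host
    cmake .. -DOPTION_TOOLS_ONLY=ON
    make -j$(nproc) tools
    cd ..

    # Stage 2: the actual Emscripten build, reusing the host tools
    mkdir build && cd build
    emcmake cmake .. -DHOST_BINARY_DIR=../build-host -DCMAKE_BUILD_TYPE=RelWithDebInfo
    emmake make -j$(nproc)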
.github/workflows/preview_label.yml (vendored, 66 lines removed)
@@ -1,66 +0,0 @@
name: Preview label

on:
  pull_request_target:
    types:
      - labeled

env:
  TEAM_CORE_DEVELOPER: core-developers

jobs:
  check_preview_label:
    name: Check for preview label
    if: github.event.action == 'labeled' && github.event.label.name == 'preview'

    runs-on: ubuntu-20.04

    steps:
      - name: Check if label was added by core developer
        id: core_developer
        continue-on-error: true
        uses: octokit/request-action@v2.x
        with:
          route: GET /orgs/OpenTTD/teams/${{ env.TEAM_CORE_DEVELOPER }}/memberships/${{ github.event.sender.login }}
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: steps.core_developer.outcome == 'failure'
        name: Remove preview label if not core developer
        uses: octokit/request-action@v2.x
        with:
          route: DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/preview
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          issue_number: ${{ github.event.number }}
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: steps.core_developer.outcome == 'success'
        name: Create deployment
        id: deployment
        uses: octokit/request-action@v2.x
        with:
          route: POST /repos/{owner}/{repo}/deployments
          mediaType: |
            previews:
              - ant-man
              - flash
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          ref: ${{ github.event.pull_request.head.sha }}
          task: deploy:preview
          auto_merge: false
          required_contexts: "[]"
          environment: preview-pr-${{ github.event.number }}
          description: "Preview for Pull Request #${{ github.event.number }}"
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: steps.core_developer.outcome == 'success'
        name: Trigger 'preview build'
        uses: peter-evans/repository-dispatch@v1
        with:
          token: ${{ secrets.PREVIEW_GITHUB_TOKEN }}
          event-type: "Preview build #${{ github.event.number }}"
          client-payload: '{"folder": "pr${{ github.event.number }}", "sha": "${{ github.event.pull_request.head.sha }}", "deployment_id": "${{ fromJson(steps.deployment.outputs.data).id }}"}'
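For orientation, the two octokit/request-action calls in that workflow map onto plain GitHub REST endpoints; a hypothetical curl sketch with placeholder user, SHA, and PR number (illustrative only, not part of the workflow):

    # Does the label author belong to the core-developers team? (a 404 status means no)
    curl -s -o /dev/null -w "%{http_code}" \
      -H "Authorization: token $GITHUB_TOKEN" \
      https://api.github.com/orgs/OpenTTD/teams/core-developers/memberships/SOME_USER

    # Create the deployment that the preview build will later update
    curl -s -X POST -H "Authorization: token $GITHUB_TOKEN" \
      -d '{"ref": "HEAD_SHA", "task": "deploy:preview", "auto_merge": false, "required_contexts": [], "environment": "preview-pr-123"}' \
      https://api.github.com/repos/OpenTTD/OpenTTD/deployments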
.github/workflows/preview_push.yml (vendored, 66 lines removed)
@@ -1,66 +0,0 @@
name: Preview push

on:
  pull_request_target:
    types:
      - synchronize

jobs:
  check_new_preview:
    name: Check preview needs update

    runs-on: ubuntu-20.04

    steps:
      - name: Check if earlier preview exists
        id: earlier_preview
        uses: octokit/request-action@v2.x
        with:
          route: GET /repos/{owner}/{repo}/deployments
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          environment: preview-pr-${{ github.event.number }}
          per_page: 1
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: toJson(fromJson(steps.earlier_preview.outputs.data)) != '[]'
        name: Check for preview label
        id: preview_label
        uses: octokit/request-action@v2.x
        with:
          route: GET /repos/{owner}/{repo}/issues/{issue_number}/labels
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          issue_number: ${{ github.event.number }}
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: toJson(fromJson(steps.earlier_preview.outputs.data)) != '[]' && contains(fromJson(steps.preview_label.outputs.data).*.name, 'preview')
        name: Create deployment
        id: deployment
        uses: octokit/request-action@v2.x
        with:
          route: POST /repos/{owner}/{repo}/deployments
          mediaType: |
            previews:
              - ant-man
              - flash
          owner: ${{ github.event.repository.owner.login }}
          repo: ${{ github.event.repository.name }}
          ref: ${{ github.event.pull_request.head.sha }}
          task: deploy:preview
          auto_merge: false
          required_contexts: "[]"
          environment: preview-pr-${{ github.event.number }}
          description: "Preview for Pull Request #${{ github.event.number }}"
        env:
          GITHUB_TOKEN: ${{ secrets.PREVIEW_GITHUB_TOKEN }}

      - if: toJson(fromJson(steps.earlier_preview.outputs.data)) != '[]' && contains(fromJson(steps.preview_label.outputs.data).*.name, 'preview')
        name: Trigger 'preview build'
        uses: peter-evans/repository-dispatch@v1
        with:
          token: ${{ secrets.PREVIEW_GITHUB_TOKEN }}
          event-type: "Preview build #${{ github.event.number }}"
          client-payload: '{"folder": "pr${{ github.event.number }}", "sha": "${{ github.event.pull_request.head.sha }}", "deployment_id": "${{ fromJson(steps.deployment.outputs.data).id }}"}'
991
.github/workflows/release.yml
vendored
991
.github/workflows/release.yml
vendored
@@ -1,991 +0,0 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: 'Ref to build (for Pull Requests, use refs/pull/NNN/head)'
|
||||
required: true
|
||||
repository_dispatch:
|
||||
# client_payload should be the same as the inputs for workflow_dispatch.
|
||||
types:
|
||||
- Build*
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
|
||||
jobs:
|
||||
source:
|
||||
name: Source
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
outputs:
|
||||
version: ${{ steps.metadata.outputs.version }}
|
||||
is_tag: ${{ steps.metadata.outputs.is_tag }}
|
||||
trigger_type: ${{ steps.metadata.outputs.trigger_type }}
|
||||
folder: ${{ steps.metadata.outputs.folder }}
|
||||
|
||||
steps:
|
||||
- name: Checkout (Release)
|
||||
if: github.event_name == 'release'
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
# We generate a changelog; for this we need the full git log.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout (Manual)
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
# We generate a changelog; for this we need the full git log.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout (Trigger)
|
||||
if: github.event_name == 'repository_dispatch'
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{ github.event.client_payload.ref }}
|
||||
# We generate a changelog; for this we need the full git log.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check valid branch name
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
REF="${{ github.event.inputs.ref }}"
|
||||
elif [ "${{ github.event_name }}" = "repository_dispatch" ]; then
|
||||
REF="${{ github.event.client_payload.ref }}"
|
||||
else
|
||||
REF="${{ github.ref }}"
|
||||
fi
|
||||
|
||||
# Check if we are a tag.
|
||||
if [ -n "$(git name-rev --name-only --tags --no-undefined HEAD 2>/dev/null || false)" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Check if the checkout caused the branch to be named.
|
||||
if [ "$(git rev-parse --abbrev-ref HEAD)" != "HEAD" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Check if this was a pull request.
|
||||
if [ -n "$(echo ${REF} | grep '^refs/pull/[0-9]*')" ]; then
|
||||
PULL=$(echo ${REF} | cut -d/ -f3)
|
||||
git checkout -b pr${PULL}
|
||||
fi
|
||||
|
||||
# Are we still in a detached state? Error out.
|
||||
if [ "$(git rev-parse --abbrev-ref HEAD)" == "HEAD" ]; then
|
||||
echo "The 'ref' given resulted in a checkout of a detached HEAD."
|
||||
echo "We cannot detect the version for these checkout accurate."
|
||||
echo ""
|
||||
echo "If you want to build a Pull Request, make sure you use 'refs/pull/NNN/head'."
|
||||
echo ""
|
||||
echo "Cancelling build, as without a version we cannot store the artifacts."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Generate metadata
|
||||
id: metadata
|
||||
run: |
|
||||
echo "::group::Prepare metadata files"
|
||||
cmake -DGENERATE_OTTDREV=1 -P cmake/scripts/FindVersion.cmake
|
||||
./.github/changelog.sh > .changelog
|
||||
TZ='UTC' date +"%Y-%m-%d %H:%M UTC" > .release_date
|
||||
cat .ottdrev | cut -f 1 -d$'\t' > .version
|
||||
|
||||
if [ $(cat .ottdrev | cut -f 5 -d$'\t') = '1' ]; then
|
||||
# Assume that all tags are always releases. Why else make a tag?
|
||||
IS_TAG="true"
|
||||
|
||||
FOLDER="${{ env.FOLDER_RELEASES }}"
|
||||
TRIGGER_TYPE="new-tag"
|
||||
else
|
||||
IS_TAG="false"
|
||||
|
||||
BRANCH=$(git symbolic-ref -q HEAD | sed 's@.*/@@')
|
||||
if [ -z "${BRANCH}" ]; then
|
||||
echo "Internal error: branch name is empty."
|
||||
echo "An earlier step should have prevented this from happening."
|
||||
echo "Cancelling build, as without a branch name we cannot store the artifacts"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${BRANCH}" = "${{ env.NIGHTLIES_BRANCH }}" ]; then
|
||||
# The "master" branch is special, and we call a nightly.
|
||||
FOLDER="${{ env.FOLDER_NIGHTLIES }}/$(date +%Y)"
|
||||
TRIGGER_TYPE="new-master"
|
||||
else
|
||||
# All other branches, which can be builds of Pull Requests, are
|
||||
# put in their own folder.
|
||||
FOLDER="${{ env.FOLDER_BRANCHES }}/${BRANCH}"
|
||||
TRIGGER_TYPE="new-branch"
|
||||
fi
|
||||
fi
|
||||
|
||||
mkdir -p build/bundles
|
||||
cp .changelog build/bundles/changelog.txt
|
||||
cp .release_date build/bundles/released.txt
|
||||
cp README.md build/bundles/README.md
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "Release Date: $(cat .release_date)"
|
||||
echo "Revision: $(cat .ottdrev)"
|
||||
echo "Version: $(cat .version)"
|
||||
echo "Is tag: ${IS_TAG}"
|
||||
echo "Folder on CDN: ${FOLDER}"
|
||||
echo "Workflow trigger: ${TRIGGER_TYPE}"
|
||||
|
||||
echo "::set-output name=version::$(cat .version)"
|
||||
echo "::set-output name=is_tag::${IS_TAG}"
|
||||
echo "::set-output name=folder::${FOLDER}"
|
||||
echo "::set-output name=trigger_type::${TRIGGER_TYPE}"
|
||||
env:
|
||||
NIGHTLIES_BRANCH: master
|
||||
FOLDER_RELEASES: openttd-releases
|
||||
FOLDER_NIGHTLIES: openttd-nightlies
|
||||
FOLDER_BRANCHES: openttd-branches
|
||||
|
||||
- name: Remove VCS information
|
||||
run: |
|
||||
rm -rf .git
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
FOLDER_NAME=openttd-${{ steps.metadata.outputs.version }}
|
||||
|
||||
# Rename the folder to openttd-NNN
|
||||
mkdir ${FOLDER_NAME}
|
||||
find . -maxdepth 1 -not -name . -not -name build -not -name ${FOLDER_NAME} -exec mv {} ${FOLDER_NAME}/ \;
|
||||
|
||||
echo "::group::Create tarball (xz) bundle"
|
||||
tar --xz -cvf build/bundles/${FOLDER_NAME}-source.tar.xz ${FOLDER_NAME}
|
||||
echo "::endgroup::"
|
||||
|
||||
# This tarball is only to be used within this workflow.
|
||||
echo "::group::Create tarball (gz) bundle"
|
||||
tar --gzip -cvf source.tar.gz ${FOLDER_NAME}
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Create zip bundle"
|
||||
zip -9 -r build/bundles/${FOLDER_NAME}-source.zip ${FOLDER_NAME}
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: openttd-source
|
||||
path: build/bundles/*
|
||||
retention-days: 5
|
||||
|
||||
- name: Store source (for other jobs)
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: internal-source
|
||||
path: source.tar.gz
|
||||
retention-days: 1
|
||||
|
||||
docs:
|
||||
name: Docs
|
||||
needs: source
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Update apt"
|
||||
sudo apt-get update
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install dependencies"
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
doxygen \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir -p ${GITHUB_WORKSPACE}/build
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_DOCS_ONLY=ON \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build . --target docs
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
BASENAME=openttd-${{ needs.source.outputs.version }}
|
||||
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
|
||||
mv docs/source ${BASENAME}-docs
|
||||
mv docs/ai-api ${BASENAME}-docs-ai
|
||||
mv docs/gs-api ${BASENAME}-docs-gs
|
||||
|
||||
mkdir -p bundles
|
||||
|
||||
echo "::group::Create docs bundle"
|
||||
tar --xz -cf bundles/${BASENAME}-docs.tar.xz ${BASENAME}-docs
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Create AI API docs bundle"
|
||||
tar --xz -cf bundles/${BASENAME}-docs-ai.tar.xz ${BASENAME}-docs-ai
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Create GameScript API docs bundle"
|
||||
tar --xz -cf bundles/${BASENAME}-docs-gs.tar.xz ${BASENAME}-docs-gs
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: openttd-docs
|
||||
path: build/bundles/*.tar.xz
|
||||
retention-days: 5
|
||||
|
||||
linux:
|
||||
name: Linux (Generic)
|
||||
needs: source
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container:
|
||||
# manylinux2014 is based on CentOS 7, but already has a lot of things
|
||||
# installed and preconfigured. It makes it easier to build OpenTTD.
|
||||
image: quay.io/pypa/manylinux2014_x86_64
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Install dependencies"
|
||||
yum install -y \
|
||||
fontconfig-devel \
|
||||
freetype-devel \
|
||||
libicu-devel \
|
||||
libpng-devel \
|
||||
libpng-devel \
|
||||
lzo-devel \
|
||||
SDL2-devel \
|
||||
wget \
|
||||
xz-devel \
|
||||
zlib-devel \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
# The yum variant of fluidsynth depends on all possible audio drivers,
|
||||
# like jack, ALSA, pulseaudio, etc. This is not really useful for us,
|
||||
# as we route the output of fluidsynth back via our sound driver, and
|
||||
# as such do not use these audio driver outputs at all. So instead,
|
||||
# we compile fluidsynth ourselves, with as little dependencies as
|
||||
# possible. This currently means it picks up SDL2, but this is fine,
|
||||
# as we need SDL2 anyway.
|
||||
echo "::group::Install fluidsynth"
|
||||
wget https://github.com/FluidSynth/fluidsynth/archive/v2.1.6.tar.gz
|
||||
tar xf v2.1.6.tar.gz
|
||||
(
|
||||
cd fluidsynth-2.1.6
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=/usr
|
||||
cmake --build . -j $(nproc)
|
||||
cmake --install .
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir -p build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_PACKAGE_DEPENDENCIES=ON \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
cmake --build . -j $(nproc)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourself at
|
||||
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: openttd-linux-generic
|
||||
path: build/bundles
|
||||
retention-days: 5
|
||||
|
||||
linux-distro:
|
||||
name: Linux (Distros)
|
||||
needs: source
|
||||
|
||||
if: needs.source.outputs.is_tag == 'true'
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- container_image: "ubuntu:18.04"
|
||||
bundle_name: "bionic"
|
||||
compiler: "g++-8"
|
||||
- container_image: "ubuntu:20.04"
|
||||
bundle_name: "focal"
|
||||
compiler: "g++"
|
||||
- container_image: "ubuntu:20.10"
|
||||
bundle_name: "groovy"
|
||||
compiler: "g++"
|
||||
- container_image: "debian:buster"
|
||||
bundle_name: "buster"
|
||||
compiler: "g++"
|
||||
- container_image: "debian:bullseye"
|
||||
bundle_name: "bullseye"
|
||||
compiler: "g++"
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container:
|
||||
image: ${{ matrix.container_image }}
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
echo "::group::Update apt"
|
||||
apt-get update
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Install dependencies"
|
||||
apt-get install -y --no-install-recommends \
|
||||
cmake \
|
||||
debhelper \
|
||||
${{ matrix.compiler }} \
|
||||
git \
|
||||
make \
|
||||
openssl \
|
||||
libfontconfig-dev \
|
||||
libfluidsynth-dev \
|
||||
libicu-dev \
|
||||
liblzma-dev \
|
||||
liblzo2-dev \
|
||||
libsdl2-dev \
|
||||
lsb-release \
|
||||
zlib1g-dev \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mkdir -p build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
CXX=${{ matrix.compiler }} cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(nproc) cores"
|
||||
# Ubuntu 18.04 cmake does not support -j so we pass the option to the native tool
|
||||
cmake --build . -- -j $(nproc)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourself at
|
||||
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: openttd-linux-${{ matrix.bundle_name }}
|
||||
path: build/bundles
|
||||
retention-days: 5
|
||||
|
||||
macos:
|
||||
name: MacOS
|
||||
needs: source
|
||||
|
||||
runs-on: macos-10.15
|
||||
env:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.14
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install dependencies
|
||||
env:
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install pandoc
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
run: |
|
||||
echo "::set-output name=image::$ImageOS-$ImageVersion"
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: /usr/local/share/vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-release-0 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-release
|
||||
${{ steps.key.outputs.image }}-vcpkg-x64
|
||||
|
||||
- name: Prepare vcpkg
|
||||
run: |
|
||||
vcpkg install \
|
||||
liblzma:x64-osx \
|
||||
liblzma:arm64-osx \
|
||||
libpng:x64-osx \
|
||||
libpng:arm64-osx \
|
||||
lzo:x64-osx \
|
||||
lzo:arm64-osx \
|
||||
zlib:x64-osx \
|
||||
zlib:arm64-osx \
|
||||
# EOF
|
||||
|
||||
- name: Install GCC problem matcher
|
||||
uses: ammaraskar/gcc-problem-matcher@master
|
||||
|
||||
- name: Build tools
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DOPTION_TOOLS_ONLY=ON \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build tools"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu) --target tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Import code signing certificates
|
||||
uses: Apple-Actions/import-codesign-certs@v1
|
||||
with:
|
||||
# The certificates in a PKCS12 file encoded as a base64 string
|
||||
p12-file-base64: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_P12_BASE64 }}
|
||||
# The password used to import the PKCS12 file.
|
||||
p12-password: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_PASSWORD }}
|
||||
# If this is run on a fork, there may not be a certificate set up - continue in this case
|
||||
continue-on-error: true
|
||||
|
||||
- name: Build arm64
|
||||
run: |
|
||||
mkdir build-arm64
|
||||
cd build-arm64
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_OSX_ARCHITECTURES=arm64 \
|
||||
-DVCPKG_TARGET_TRIPLET=arm64-osx \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Build x64
|
||||
run: |
|
||||
mkdir build-x64
|
||||
cd build-x64
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-DCMAKE_OSX_ARCHITECTURES=x86_64 \
|
||||
-DVCPKG_TARGET_TRIPLET=x64-osx \
|
||||
-DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DCPACK_BUNDLE_APPLE_CERT_APP=${{ secrets.APPLE_DEVELOPER_CERTIFICATE_ID }} \
|
||||
"-DCPACK_BUNDLE_APPLE_CODESIGN_PARAMETER=--deep -f --options runtime" \
|
||||
-DAPPLE_UNIVERSAL_PACKAGE=1 \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
echo "Running on $(sysctl -n hw.logicalcpu) cores"
|
||||
cmake --build . -j $(sysctl -n hw.logicalcpu)
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Create bundles
|
||||
run: |
|
||||
cd build-x64
|
||||
|
||||
echo "::group::Create universal binary"
|
||||
# Combine the `openttd` binaries from each build into a single file
|
||||
lipo -create -output openttd-universal ../build-*/openttd
|
||||
mv openttd-universal openttd
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourself at
|
||||
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Install gon
|
||||
env:
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew tap mitchellh/gon
|
||||
brew install mitchellh/gon/gon
|
||||
|
||||
- name: Notarize
|
||||
env:
|
||||
AC_USERNAME: ${{ secrets.APPLE_DEVELOPER_APP_USERNAME }}
|
||||
AC_PASSWORD: ${{ secrets.APPLE_DEVELOPER_APP_PASSWORD }}
|
||||
run: |
|
||||
cd build-x64
|
||||
../os/macosx/notarize.sh
|
||||
|
||||
- name: Build zip
|
||||
run: |
|
||||
cd build-x64
|
||||
|
||||
pushd _CPack_Packages/*/Bundle/openttd-*/
|
||||
|
||||
# Remove the Applications symlink from the staging folder
|
||||
rm -f Applications
|
||||
|
||||
# Remove the original dmg built by CPack to avoid a conflict when resolving
|
||||
# the zip_filename variable below
|
||||
rm -f ../*.dmg
|
||||
|
||||
zip_filename=(../openttd-*)
|
||||
|
||||
# Package up the existing, notarised .app into a zip file
|
||||
zip -r -9 ${zip_filename}.zip OpenTTD.app
|
||||
|
||||
popd
|
||||
|
||||
# Now move it into place to be uploaded
|
||||
mv _CPack_Packages/*/Bundle/openttd-*.zip bundles/
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: openttd-macos-universal
|
||||
path: build-x64/bundles
|
||||
retention-days: 5
|
||||
|
||||
windows:
|
||||
name: Windows
|
||||
needs: source
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: x86
|
||||
host: x86
|
||||
- arch: x64
|
||||
host: x64
|
||||
- arch: arm64
|
||||
host: x64_arm64
|
||||
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Download source
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: internal-source
|
||||
|
||||
- name: Unpack source
|
||||
shell: bash
|
||||
run: |
|
||||
tar -xf source.tar.gz --strip-components=1
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
choco install pandoc
|
||||
|
||||
- name: Prepare cache key
|
||||
id: key
|
||||
shell: powershell
|
||||
run: |
|
||||
# Work around caching failure with GNU tar
|
||||
New-Item -Type Junction -Path vcpkg -Target c:\vcpkg
|
||||
|
||||
Write-Output "::set-output name=image::$env:ImageOS-$env:ImageVersion"
|
||||
|
||||
- name: Enable vcpkg cache
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: vcpkg/installed
|
||||
key: ${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}-0 # Increase the number whenever dependencies are modified
|
||||
restore-keys: |
|
||||
${{ steps.key.outputs.image }}-vcpkg-${{ matrix.arch }}
|
||||
|
||||
- name: Prepare vcpkg
|
||||
shell: bash
|
||||
run: |
|
||||
vcpkg install --triplet=${{ matrix.arch }}-windows-static \
|
||||
liblzma \
|
||||
libpng \
|
||||
lzo \
|
||||
zlib \
|
||||
# EOF
|
||||
|
||||
- name: Install MSVC problem matcher
|
||||
uses: ammaraskar/msvc-problem-matcher@master
|
||||
|
||||
- name: Configure developer command prompt for tools
|
||||
uses: ilammy/msvc-dev-cmd@v1
|
||||
with:
|
||||
arch: x64
|
||||
|
||||
- name: Build tools
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build-host
|
||||
cd build-host
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-GNinja \
|
||||
-DOPTION_TOOLS_ONLY=ON \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build . --target tools
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Configure developer command prompt for ${{ matrix.arch }}
|
||||
uses: ilammy/msvc-dev-cmd@v1
|
||||
with:
|
||||
arch: ${{ matrix.host }}
|
||||
|
||||
- name: Import code signing certificate
|
||||
shell: powershell
|
||||
# If this is run on a fork, there may not be a certificate set up - continue in this case
|
||||
continue-on-error: true
|
||||
run: |
|
||||
$tempFile = [System.IO.Path]::GetTempFileName()
|
||||
$bytes = [System.Convert]::FromBase64String($env:WINDOWS_CERTIFICATE_P12)
|
||||
[IO.File]::WriteAllBytes($tempFile, $bytes)
|
||||
$pwd = ConvertTo-SecureString $env:WINDOWS_CERTIFICATE_PASSWORD -AsPlainText -Force
|
||||
Import-PfxCertificate -FilePath $tempFile -CertStoreLocation Cert:\CurrentUser\My -Password $pwd
|
||||
Remove-Item $tempFile
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_P12: ${{ secrets.WINDOWS_CERTIFICATE_P12 }}
|
||||
WINDOWS_CERTIFICATE_PASSWORD: ${{ secrets.WINDOWS_CERTIFICATE_PASSWORD }}
|
||||
|
||||
- name: Build (with installer)
|
||||
if: needs.source.outputs.is_tag == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-GNinja \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-windows-static \
|
||||
-DCMAKE_TOOLCHAIN_FILE="c:\vcpkg\scripts\buildsystems\vcpkg.cmake" \
|
||||
-DOPTION_USE_NSIS=ON \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DWINDOWS_CERTIFICATE_COMMON_NAME="${WINDOWS_CERTIFICATE_COMMON_NAME}" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build .
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_COMMON_NAME: ${{ secrets.WINDOWS_CERTIFICATE_COMMON_NAME }}
|
||||
|
||||
- name: Build (without installer)
|
||||
if: needs.source.outputs.is_tag != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
echo "::group::CMake"
|
||||
cmake ${GITHUB_WORKSPACE} \
|
||||
-GNinja \
|
||||
-DVCPKG_TARGET_TRIPLET=${{ matrix.arch }}-windows-static \
|
||||
-DCMAKE_TOOLCHAIN_FILE="c:\vcpkg\scripts\buildsystems\vcpkg.cmake" \
|
||||
-DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DWINDOWS_CERTIFICATE_COMMON_NAME="${WINDOWS_CERTIFICATE_COMMON_NAME}" \
|
||||
# EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Build"
|
||||
cmake --build .
|
||||
echo "::endgroup::"
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_COMMON_NAME: ${{ secrets.WINDOWS_CERTIFICATE_COMMON_NAME }}
|
||||
|
||||
- name: Create bundles
|
||||
shell: bash
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build
|
||||
echo "::group::Run CPack"
|
||||
cpack
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare PDB to be bundled"
|
||||
PDB=$(ls bundles/*.zip | cut -d/ -f2 | sed 's/.zip$/.pdb/')
|
||||
cp openttd.pdb bundles/${PDB}
|
||||
xz -9 bundles/${PDB}
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Cleanup"
|
||||
# Remove the sha256 files CPack generates; we will do this ourselves at
# the end of this workflow.
|
||||
rm -f bundles/*.sha256
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Sign installer
|
||||
if: needs.source.outputs.is_tag == 'true'
|
||||
shell: bash
|
||||
# If this is run on a fork, there may not be a certificate set up - continue in this case
|
||||
continue-on-error: true
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/build/bundles
|
||||
../../os/windows/sign.bat *.exe "${WINDOWS_CERTIFICATE_COMMON_NAME}"
|
||||
env:
|
||||
WINDOWS_CERTIFICATE_COMMON_NAME: ${{ secrets.WINDOWS_CERTIFICATE_COMMON_NAME }}
|
||||
|
||||
- name: Store bundles
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: openttd-windows-${{ matrix.arch }}
|
||||
path: build/bundles
|
||||
retention-days: 5
|
||||
|
||||
  upload:
    name: Upload (AWS)
    needs:
    - source
    - docs
    - linux
    - linux-distro
    - macos
    - windows

    # The 'linux' job can be skipped if it is a nightly. That normally causes
    # this job to be skipped too, unless we use the lengthy expression below :)
    # "always()" is important here; it is the keyword that stops this job from
    # being skipped when any dependency is skipped. It looks a bit silly, but
    # that is how GitHub Actions works ;)
    if: always() && needs.source.result == 'success' && needs.docs.result == 'success' && needs.linux.result == 'success' && (needs.linux-distro.result == 'success' || needs.linux-distro.result == 'skipped') && needs.macos.result == 'success' && needs.windows.result == 'success'
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Download all bundles
|
||||
uses: actions/download-artifact@v2
|
||||
|
||||
      - name: Calculate checksums
        run: |
          echo "::group::Move bundles to a single folder"
          mkdir bundles
          mv openttd-*/* bundles/
          cd bundles
          echo "::endgroup::"

          for i in $(ls openttd-*); do
            echo "::group::Calculating checksums for ${i}"
            openssl dgst -r -md5 -hex $i > $i.md5sum
            openssl dgst -r -sha1 -hex $i > $i.sha1sum
            openssl dgst -r -sha256 -hex $i > $i.sha256sum
            echo "::endgroup::"
          done
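          # (Illustrative note, not part of the original workflow: a downloaded bundle
          # can be checked against these files with e.g. "sha256sum -c <file>.sha256sum",
          # since "openssl dgst -r" emits the "<hash> *<name>" format that sha256sum expects.)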
|
||||
|
||||
- name: Upload bundles to AWS
|
||||
run: |
|
||||
aws s3 cp --recursive --only-show-errors bundles/ s3://${{ secrets.CDN_S3_BUCKET }}/${{ needs.source.outputs.folder }}/${{ needs.source.outputs.version }}/
|
||||
|
||||
# We do not invalidate the CloudFront distribution here. The trigger
# for "New OpenTTD release" first updates the manifest files and
# creates an index.html. We invalidate after that, so everything
# becomes visible at once.
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
|
||||
|
||||
- name: Trigger 'New OpenTTD release'
|
||||
uses: peter-evans/repository-dispatch@v1
|
||||
with:
|
||||
token: ${{ secrets.DEPLOYMENT_TOKEN }}
|
||||
repository: OpenTTD/workflows
|
||||
event-type: ${{ needs.source.outputs.trigger_type }}
|
||||
client-payload: '{"version": "${{ needs.source.outputs.version }}", "folder": "${{ needs.source.outputs.folder }}"}'
|
||||
|
||||
upload-steam:
|
||||
name: Upload (Steam)
|
||||
needs:
|
||||
- source
|
||||
- linux
|
||||
- macos
|
||||
- windows
|
||||
|
||||
if: needs.source.outputs.trigger_type == 'new-master' || needs.source.outputs.trigger_type == 'new-tag'
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Download all bundles
|
||||
uses: actions/download-artifact@v2
|
||||
|
||||
- name: Setup steamcmd
|
||||
uses: CyberAndrii/setup-steamcmd@v1
|
||||
|
||||
- name: Generate Steam auth code
|
||||
id: steam-totp
|
||||
uses: CyberAndrii/steam-totp@v1
|
||||
with:
|
||||
shared_secret: ${{ secrets.STEAM_SHARED_SECRET }}
|
||||
|
||||
- name: Upload to Steam
|
||||
run: |
|
||||
echo "::group::Extracting source"
|
||||
mkdir source
|
||||
(
|
||||
cd source
|
||||
tar -xf ../internal-source/source.tar.gz --strip-components=1
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
mkdir steam
|
||||
(
|
||||
cd steam
|
||||
|
||||
echo "::group::Prepare Win32"
|
||||
unzip ../openttd-windows-x86/openttd-*-windows-win32.zip
|
||||
mv openttd-*-windows-win32 steam-win32
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare Win64"
|
||||
unzip ../openttd-windows-x64/openttd-*-windows-win64.zip
|
||||
mv openttd-*-windows-win64 steam-win64
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare macOS"
|
||||
mkdir steam-macos
|
||||
(
|
||||
cd steam-macos
|
||||
unzip ../../openttd-macos-universal/openttd-*-macos-universal.zip
|
||||
)
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Prepare Linux"
|
||||
tar xvf ../openttd-linux-generic/openttd-*-linux-generic-amd64.tar.xz
|
||||
mv openttd-*-linux-generic-amd64 steam-linux
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Preparing build file"
|
||||
if [ "${{ needs.source.outputs.trigger_type }}" = "new-tag" ]; then
|
||||
BRANCH="testing"
|
||||
else
|
||||
BRANCH="nightly"
|
||||
fi
|
||||
cat ../source/os/steam/release.vdf | sed 's/@@DESCRIPTION@@/openttd-${{ needs.source.outputs.version }}/;s/@@BRANCH@@/'${BRANCH}'/' > release.vdf
|
||||
cat release.vdf
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "::group::Upload to Steam"
|
||||
steamcmd +login ${{ secrets.STEAM_USERNAME }} ${{ secrets.STEAM_PASSWORD }} ${{ steps.steam-totp.outputs.code }} +run_app_build $(pwd)/release.vdf +quit
|
||||
echo "::endgroup::"
|
||||
)
|
.github/workflows/unused-strings.yml
@@ -1,18 +0,0 @@
|
||||
name: Unused strings
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
unused-strings:
|
||||
name: Unused strings
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Check for unused strings
|
||||
run: |
|
||||
set -ex
|
||||
python3 .github/unused-strings.py
|
.gitignore
@@ -1,7 +1,55 @@
|
||||
/.vs
|
||||
/build*
|
||||
CMakeSettings.json
|
||||
bin/*
|
||||
!bin/ai
|
||||
bin/ai/*
|
||||
!bin/ai/compat*.nut
|
||||
!bin/ai/regression
|
||||
!bin/data
|
||||
bin/baseset/*
|
||||
!bin/baseset/openttd.grf
|
||||
!bin/baseset/opntitle.dat
|
||||
!bin/baseset/orig_extra.grf
|
||||
!bin/baseset/orig_*.obg
|
||||
!bin/baseset/orig_*.obs
|
||||
!bin/baseset/no_sound.obs
|
||||
!bin/baseset/no_music.obm
|
||||
!bin/baseset/orig_*.obm
|
||||
!bin/game
|
||||
bin/game/*
|
||||
!bin/game/compat*.nut
|
||||
!bin/scripts
|
||||
bin/scripts/*
|
||||
!bin/scripts/*.example
|
||||
!bin/scripts/readme.txt
|
||||
|
||||
*.aps
|
||||
bundle/*
|
||||
bundles/*
|
||||
docs/aidocs/*
|
||||
docs/gamedocs/*
|
||||
docs/source/*
|
||||
/out
|
||||
.kdev4
|
||||
.kdev4/*
|
||||
*.kdev4
|
||||
media/openttd.desktop
|
||||
media/openttd.desktop.install
|
||||
objs/*
|
||||
projects/.vs
|
||||
projects/Debug
|
||||
projects/Release
|
||||
projects/*.ncb
|
||||
projects/*.suo
|
||||
projects/*.sdf
|
||||
projects/*.opensdf
|
||||
projects/*.vcproj.*.user
|
||||
projects/*.vcxproj.user
|
||||
projects/*.VC.db
|
||||
projects/*.VC.opendb
|
||||
src/rev.cpp
|
||||
src/os/windows/ottdres.rc
|
||||
|
||||
/Makefile*
|
||||
!/Makefile.msvc
|
||||
/config.*
|
||||
!/config.lib
|
||||
!*.in
|
||||
*.tmp
|
||||
|
.hgignore
@@ -0,0 +1,34 @@
|
||||
syntax: glob
|
||||
|
||||
.svn
|
||||
*.aps
|
||||
bin/baseset/openttd.32.bmp
|
||||
bin/lang/*
|
||||
bin/openttd*
|
||||
bin/*.cfg
|
||||
bundle/*
|
||||
bundles/*
|
||||
config.cache*
|
||||
config.log
|
||||
config.pwd
|
||||
docs/aidocs/*
|
||||
docs/gamedocs/*
|
||||
docs/source/*
|
||||
.kdev4
|
||||
.kdev4/*
|
||||
*.kdev4
|
||||
Makefile
|
||||
Makefile.am
|
||||
Makefile.bundle
|
||||
media/openttd.desktop
|
||||
media/openttd.desktop.install
|
||||
objs/*
|
||||
projects/.vs
|
||||
projects/*.ncb
|
||||
projects/*.suo
|
||||
projects/*.sdf
|
||||
projects/*.opensdf
|
||||
projects/*.vcproj.*.user
|
||||
projects/*.vcxproj.user
|
||||
src/rev.cpp
|
||||
src/os/windows/ottdres.rc
|
CMakeLists.txt
@@ -1,385 +0,0 @@
|
||||
cmake_minimum_required(VERSION 3.9)
|
||||
|
||||
if(NOT BINARY_NAME)
|
||||
set(BINARY_NAME openttd)
|
||||
endif()
|
||||
|
||||
project(${BINARY_NAME}
|
||||
VERSION 12.0
|
||||
)
|
||||
|
||||
if(CMAKE_SOURCE_DIR STREQUAL CMAKE_BINARY_DIR)
|
||||
message(FATAL_ERROR "In-source builds not allowed. Please run \"cmake ..\" from the build directory. You may need to delete \"${CMAKE_SOURCE_DIR}/CMakeCache.txt\" first.")
|
||||
endif()
|
||||
|
||||
# Debug mode by default.
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE Debug)
|
||||
endif()
|
||||
|
||||
if (EMSCRIPTEN)
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/os/emscripten/cmake")
|
||||
endif()
|
||||
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake")
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.14)
|
||||
|
||||
# Use GNUInstallDirs to allow customisation
|
||||
# but set our own default data and bin dir
|
||||
if(NOT CMAKE_INSTALL_DATADIR)
|
||||
set(CMAKE_INSTALL_DATADIR "share/games")
|
||||
endif()
|
||||
if(NOT CMAKE_INSTALL_BINDIR)
|
||||
set(CMAKE_INSTALL_BINDIR "games")
|
||||
endif()
|
||||
include(GNUInstallDirs)
|
||||
|
||||
include(Options)
|
||||
set_options()
|
||||
set_directory_options()
|
||||
|
||||
include(Static)
|
||||
set_static_if_needed()
|
||||
|
||||
set(CMAKE_CXX_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED YES)
|
||||
set(CMAKE_CXX_EXTENSIONS NO)
|
||||
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS YES)
|
||||
|
||||
# An empty target for the tools
|
||||
add_custom_target(tools)
|
||||
|
||||
include(Endian)
|
||||
add_endian_definition()
|
||||
|
||||
include(CompileFlags)
|
||||
compile_flags()
|
||||
|
||||
if(APPLE OR UNIX)
|
||||
add_definitions(-DUNIX)
|
||||
endif()
|
||||
|
||||
if(UNIX)
|
||||
find_package(Doxygen)
|
||||
endif()
|
||||
|
||||
list(APPEND GENERATED_SOURCE_FILES "${CMAKE_BINARY_DIR}/generated/rev.cpp")
|
||||
if(WIN32)
|
||||
list(APPEND GENERATED_SOURCE_FILES "${CMAKE_BINARY_DIR}/generated/ottdres.rc")
|
||||
endif()
|
||||
|
||||
# Generate a target to determine version, which is executed on every 'make' run
|
||||
add_custom_target(find_version
|
||||
${CMAKE_COMMAND}
|
||||
-DFIND_VERSION_BINARY_DIR=${CMAKE_BINARY_DIR}/generated
|
||||
-DCPACK_BINARY_DIR=${CMAKE_BINARY_DIR}
|
||||
-DREV_MAJOR=${PROJECT_VERSION_MAJOR}
|
||||
-DREV_MINOR=${PROJECT_VERSION_MINOR}
|
||||
-DWINDOWS=${WIN32}
|
||||
-P "${CMAKE_SOURCE_DIR}/cmake/scripts/FindVersion.cmake"
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
BYPRODUCTS ${GENERATED_SOURCE_FILES}
|
||||
)
|
||||
|
||||
# Documentation
|
||||
if(DOXYGEN_EXECUTABLE)
|
||||
add_custom_target(docs)
|
||||
add_custom_target(docs_source
|
||||
${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/docs
|
||||
COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_BINARY_DIR}/Doxyfile
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
COMMENT "Generating documentation for source"
|
||||
)
|
||||
add_dependencies(docs_source
|
||||
find_version
|
||||
)
|
||||
add_dependencies(docs
|
||||
docs_source
|
||||
)
|
||||
endif()
|
||||
|
||||
include(AddCustomXXXTimestamp)
|
||||
|
||||
if(OPTION_TOOLS_ONLY)
|
||||
if(HOST_BINARY_DIR)
|
||||
unset(HOST_BINARY_DIR CACHE)
|
||||
endif()
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/src)
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
# Avoid searching for headers in Frameworks, and libraries in LIBDIR.
|
||||
set(CMAKE_FIND_FRAMEWORK LAST)
|
||||
endif()
|
||||
|
||||
# Prefer -pthread over -lpthread, which is often the better option of the two.
|
||||
set(CMAKE_THREAD_PREFER_PTHREAD YES)
|
||||
# Make sure we have Threads available.
|
||||
find_package(Threads REQUIRED)
|
||||
|
||||
find_package(ZLIB)
|
||||
find_package(LibLZMA)
|
||||
find_package(LZO)
|
||||
find_package(PNG)
|
||||
|
||||
if(NOT OPTION_DEDICATED)
|
||||
if(NOT WIN32)
|
||||
find_package(Allegro)
|
||||
if(NOT APPLE)
|
||||
find_package(Freetype)
|
||||
find_package(SDL2)
|
||||
if(NOT SDL2_FOUND)
|
||||
find_package(SDL)
|
||||
endif()
|
||||
find_package(Fluidsynth)
|
||||
find_package(Fontconfig)
|
||||
find_package(ICU OPTIONAL_COMPONENTS i18n lx)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
if(APPLE)
|
||||
find_package(Iconv)
|
||||
|
||||
find_library(AUDIOTOOLBOX_LIBRARY AudioToolbox)
|
||||
find_library(AUDIOUNIT_LIBRARY AudioUnit)
|
||||
find_library(COCOA_LIBRARY Cocoa)
|
||||
find_library(QUARTZCORE_LIBRARY QuartzCore)
|
||||
endif()
|
||||
|
||||
if(NOT EMSCRIPTEN AND NOT OPTION_DEDICATED)
|
||||
find_package(OpenGL COMPONENTS OpenGL)
|
||||
endif()
|
||||
|
||||
if(MSVC)
|
||||
find_package(Editbin REQUIRED)
|
||||
endif()
|
||||
|
||||
find_package(SSE)
|
||||
find_package(Xaudio2)
|
||||
|
||||
find_package(Grfcodec)
|
||||
|
||||
include(CheckIPOSupported)
|
||||
check_ipo_supported(RESULT IPO_FOUND)
|
||||
|
||||
show_options()
|
||||
|
||||
if(UNIX AND NOT APPLE AND NOT OPTION_DEDICATED)
|
||||
if(NOT SDL_FOUND AND NOT SDL2_FOUND AND NOT ALLEGRO_FOUND)
|
||||
message(FATAL_ERROR "SDL, SDL2 or Allegro is required for this platform")
|
||||
endif()
|
||||
endif()
|
||||
if(APPLE)
|
||||
if(NOT AUDIOTOOLBOX_LIBRARY)
|
||||
message(FATAL_ERROR "AudioToolbox is required for this platform")
|
||||
endif()
|
||||
if(NOT AUDIOUNIT_LIBRARY)
|
||||
message(FATAL_ERROR "AudioUnit is required for this platform")
|
||||
endif()
|
||||
if(NOT COCOA_LIBRARY)
|
||||
message(FATAL_ERROR "Cocoa is required for this platform")
|
||||
endif()
|
||||
if(NOT QUARTZCORE_LIBRARY)
|
||||
message(FATAL_ERROR "QuartzCore is required for this platform")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(OPTION_PACKAGE_DEPENDENCIES)
|
||||
if(NOT UNIX)
|
||||
message(FATAL_ERROR "Can only package dependencies on Linux")
|
||||
endif()
|
||||
if(OPTION_INSTALL_FHS)
|
||||
message(FATAL_ERROR "Cannot install in FHS folders when we are packaging dependencies")
|
||||
endif()
|
||||
if(${CMAKE_VERSION} VERSION_LESS "3.16.0")
|
||||
message(FATAL_ERROR "OPTION_PACKAGE_DEPENDENCIES can only work with CMake 3.16+; you are using ${CMAKE_VERSION}")
|
||||
endif()
|
||||
|
||||
# If we are packaging dependencies, we do two things:
|
||||
# 1) set the RPATH to include $ORIGIN/lib; $ORIGIN (that literal string)
|
||||
# is a Linux indicator for "path where application is". In CMake, we
|
||||
# have to do this before add_executable() is executed.
|
||||
# 2) copy the libraries that we compile against to the "lib" folder.
|
||||
# This is done in InstallAndPackage.cmake.
|
||||
set(CMAKE_INSTALL_RPATH "\$ORIGIN/lib")
|
||||
set(CMAKE_BUILD_WITH_INSTALL_RPATH ON)
|
||||
endif()
|
||||
|
||||
include(SourceList)
|
||||
|
||||
# Needed by rev.cpp
|
||||
include_directories(${CMAKE_SOURCE_DIR}/src)
|
||||
# Needed by everything that uses Squirrel
|
||||
include_directories(${CMAKE_SOURCE_DIR}/src/3rdparty/squirrel/include)
|
||||
|
||||
include(MSVCFilters)
|
||||
|
||||
add_executable(openttd WIN32 ${GENERATED_SOURCE_FILES})
|
||||
set_target_properties(openttd PROPERTIES OUTPUT_NAME "${BINARY_NAME}")
|
||||
# All other files are added via target_sources()
|
||||
|
||||
if(MSVC)
|
||||
# Add DPI manifest to project; other WIN32 targets get this via ottdres.rc
|
||||
target_sources(openttd PRIVATE "${CMAKE_SOURCE_DIR}/os/windows/openttd.manifest")
|
||||
endif()
|
||||
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/bin)
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/src)
|
||||
add_subdirectory(${CMAKE_SOURCE_DIR}/media)
|
||||
|
||||
add_dependencies(openttd
|
||||
find_version)
|
||||
|
||||
target_link_libraries(openttd
|
||||
openttd::languages
|
||||
openttd::settings
|
||||
openttd::media
|
||||
openttd::basesets
|
||||
openttd::script_api
|
||||
Threads::Threads
|
||||
)
|
||||
|
||||
if(HAIKU)
|
||||
target_link_libraries(openttd "be" "network" "midi")
|
||||
endif()
|
||||
|
||||
if(IPO_FOUND)
|
||||
set_target_properties(openttd PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELEASE True)
|
||||
set_target_properties(openttd PROPERTIES INTERPROCEDURAL_OPTIMIZATION_MINSIZEREL True)
|
||||
set_target_properties(openttd PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELWITHDEBINFO True)
|
||||
endif()
|
||||
set_target_properties(openttd PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
|
||||
process_compile_flags()
|
||||
|
||||
include(LinkPackage)
|
||||
link_package(PNG TARGET PNG::PNG ENCOURAGED)
|
||||
link_package(ZLIB TARGET ZLIB::ZLIB ENCOURAGED)
|
||||
link_package(LIBLZMA TARGET LibLZMA::LibLZMA ENCOURAGED)
|
||||
link_package(LZO)
|
||||
|
||||
if(NOT OPTION_DEDICATED)
|
||||
link_package(Fluidsynth)
|
||||
link_package(SDL)
|
||||
link_package(SDL2 TARGET SDL2::SDL2)
|
||||
link_package(Allegro)
|
||||
link_package(FREETYPE TARGET Freetype::Freetype)
|
||||
link_package(Fontconfig TARGET Fontconfig::Fontconfig)
|
||||
link_package(ICU_lx)
|
||||
link_package(ICU_i18n)
|
||||
|
||||
if(SDL2_FOUND AND OPENGL_FOUND AND UNIX)
|
||||
# SDL2 dynamically loads OpenGL if needed, so do not link to OpenGL when
|
||||
# on Linux. For Windows, we need to link to OpenGL as we also have a win32
|
||||
# driver using it.
|
||||
add_definitions(-DWITH_OPENGL)
|
||||
message(STATUS "OpenGL found -- -DWITH_OPENGL -- (via SDL2)")
|
||||
else()
|
||||
link_package(OpenGL TARGET OpenGL::GL)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
link_package(Iconv TARGET Iconv::Iconv)
|
||||
|
||||
target_link_libraries(openttd
|
||||
${AUDIOTOOLBOX_LIBRARY}
|
||||
${AUDIOUNIT_LIBRARY}
|
||||
${COCOA_LIBRARY}
|
||||
${QUARTZCORE_LIBRARY}
|
||||
)
|
||||
|
||||
add_definitions(
|
||||
-DWITH_COCOA
|
||||
)
|
||||
endif()
|
||||
|
||||
if(EMSCRIPTEN)
|
||||
add_library(WASM::WASM INTERFACE IMPORTED)
|
||||
|
||||
# Allow heap-growth, and start with a bigger memory size.
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s ALLOW_MEMORY_GROWTH=1")
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s INITIAL_MEMORY=33554432")
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s DISABLE_EXCEPTION_CATCHING=0")
|
||||
add_definitions(-s DISABLE_EXCEPTION_CATCHING=0)
|
||||
|
||||
# Export functions to Javascript.
|
||||
target_link_libraries(WASM::WASM INTERFACE "-s EXPORTED_FUNCTIONS='[\"_main\", \"_em_openttd_add_server\"]' -s EXTRA_EXPORTED_RUNTIME_METHODS='[\"cwrap\"]'")
|
||||
|
||||
# Preload all the files we generate during build.
|
||||
# As we do not compile with FreeType / FontConfig, we also have no way to
|
||||
# render several languages (like Chinese, ...), so where do you draw the
# line on which languages to include and which not? In the end, especially
# as every added language makes downloading slower, we decided to ship
# only the English language.
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_BINARY_DIR}/baseset@/baseset")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_BINARY_DIR}/lang/english.lng@/lang/english.lng")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_SOURCE_DIR}/bin/ai@/ai")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--preload-file ${CMAKE_SOURCE_DIR}/bin/game@/game")
|
||||
|
||||
# We use IDBFS for persistent storage.
|
||||
target_link_libraries(WASM::WASM INTERFACE "-lidbfs.js")
|
||||
|
||||
# Use custom pre-js and shell.html.
|
||||
target_link_libraries(WASM::WASM INTERFACE "--pre-js ${CMAKE_SOURCE_DIR}/os/emscripten/pre.js")
|
||||
target_link_libraries(WASM::WASM INTERFACE "--shell-file ${CMAKE_SOURCE_DIR}/os/emscripten/shell.html")
|
||||
|
||||
# Build the .html (which builds the .js, .wasm, and .data too).
|
||||
set_target_properties(openttd PROPERTIES SUFFIX ".html")
|
||||
target_link_libraries(openttd WASM::WASM)
|
||||
endif()
|
||||
|
||||
if(NOT PERSONAL_DIR STREQUAL "(not set)")
|
||||
add_definitions(
|
||||
-DWITH_PERSONAL_DIR
|
||||
-DPERSONAL_DIR="${PERSONAL_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT SHARED_DIR STREQUAL "(not set)")
|
||||
add_definitions(
|
||||
-DWITH_SHARED_DIR
|
||||
-DSHARED_DIR="${SHARED_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT GLOBAL_DIR STREQUAL "(not set)")
|
||||
add_definitions(
|
||||
-DGLOBAL_DATA_DIR="${GLOBAL_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
link_package(SSE)
|
||||
|
||||
add_definitions_based_on_options()
|
||||
|
||||
if(WIN32)
|
||||
add_definitions(
|
||||
-DUNICODE
|
||||
-D_UNICODE
|
||||
-DWITH_UNISCRIBE
|
||||
-DPSAPI_VERSION=1
|
||||
)
|
||||
|
||||
target_link_libraries(openttd
|
||||
ws2_32
|
||||
winmm
|
||||
imm32
|
||||
usp10
|
||||
psapi
|
||||
)
|
||||
endif()
|
||||
|
||||
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
add_definitions(-DPOINTER_IS_64BIT)
|
||||
endif()
|
||||
|
||||
include(CreateRegression)
|
||||
create_regression()
|
||||
|
||||
if(APPLE OR WIN32)
|
||||
find_package(Pandoc)
|
||||
endif()
|
||||
|
||||
include(InstallAndPackage)
|
COMPILING.md
@@ -2,29 +2,28 @@
|
||||
|
||||
## Required/optional libraries
|
||||
|
||||
OpenTTD makes use of the following external libraries:
|
||||
The following libraries are used by OpenTTD for:
|
||||
|
||||
- (encouraged) zlib: (de)compressing of old (0.3.0-1.0.5) savegames, content downloads,
|
||||
- zlib: (de)compressing of old (0.3.0-1.0.5) savegames, content downloads,
|
||||
heightmaps
|
||||
- (encouraged) liblzma: (de)compressing of savegames (1.1.0 and later)
|
||||
- (encouraged) libpng: making screenshots and loading heightmaps
|
||||
- (optional) liblzo2: (de)compressing of old (pre 0.3.0) savegames
|
||||
|
||||
For Linux, the following additional libraries are used (for non-dedicated only):
|
||||
|
||||
- libSDL2: hardware access (video, sound, mouse)
|
||||
- liblzo2: (de)compressing of old (pre 0.3.0) savegames
|
||||
- liblzma: (de)compressing of savegames (1.1.0 and later)
|
||||
- libpng: making screenshots and loading heightmaps
|
||||
- libfreetype: loading generic fonts and rendering them
|
||||
- libfontconfig: searching for fonts, resolving font names to actual fonts
|
||||
- libicu: handling of right-to-left scripts (e.g. Arabic and Persian) and
|
||||
natural sorting of strings
|
||||
natural sorting of strings (Linux only)
|
||||
- libSDL2: hardware access (video, sound, mouse) (not required for Windows or macOS)
|
||||
|
||||
OpenTTD does not require any of the libraries to be present, but without
|
||||
liblzma you cannot open most recent savegames and without zlib you cannot
|
||||
open most older savegames or use the content downloading system.
|
||||
Without libSDL/liballegro on non-Windows and non-macOS machines you have
|
||||
no graphical user interface; you would be building a dedicated server.
|
||||
|
||||
## Windows
|
||||
## Windows:
|
||||
|
||||
You need Microsoft Visual Studio 2017 or more recent.
|
||||
You need Microsoft Visual Studio 2015 Update 3 or newer.
|
||||
|
||||
You can download the free Visual Studio Community Edition from Microsoft at
|
||||
https://visualstudio.microsoft.com/vs/community/.
|
||||
@@ -57,94 +56,86 @@ To install both the x64 (64bit) and x86 (32bit) variants (though only one is nec
|
||||
.\vcpkg install liblzma:x86-windows-static libpng:x86-windows-static lzo:x86-windows-static zlib:x86-windows-static
|
||||
```
|
||||
|
||||
You can open the folder (as a CMake project). CMake will be detected, and you can compile from there.
|
||||
If libraries are installed but not found, you need to set VCPKG_TARGET_TRIPLET in CMake parameters.
|
||||
For Visual Studio 2017 you also need to set CMAKE_TOOLCHAIN_FILE.
|
||||
(Typical values are shown in the MSVC project file command line example)
|
||||
Open the relevant project file and it should build automatically.
|
||||
- VS 2015: projects/openttd_vs140.sln
|
||||
- VS 2017: projects/openttd_vs141.sln
|
||||
- VS 2019: projects/openttd_vs142.sln
|
||||
|
||||
Alternatively, you can create a MSVC project file via CMake. For this
|
||||
either download CMake from https://cmake.org/download/ or use the version
|
||||
that comes with vcpkg. After that, you can run something similar to this:
|
||||
Set the build mode to `Release` in
|
||||
`Build > Configuration manager > Active solution configuration`.
|
||||
You can now compile.
|
||||
|
||||
```powershell
|
||||
mkdir build
|
||||
cd build
|
||||
cmake.exe .. -G'Visual Studio 16 2019' -DCMAKE_TOOLCHAIN_FILE="<location of vcpkg>\vcpkg\scripts\buildsystems\vcpkg.cmake" -DVCPKG_TARGET_TRIPLET="x64-windows-static"
|
||||
```
|
||||
If everything works well the binary should be in `objs\Win[32|64]\Release\openttd.exe`
|
||||
and in `bin\openttd.exe`
|
||||
|
||||
Change `<location of vcpkg>` to where you have installed vcpkg. After this,
the build folder contains MSVC project files. MSVC can rebuild the project
files itself via the `ZERO_CHECK` project.
|
||||
The OpenTTD wiki may provide additional help with [compiling for Windows](https://wiki.openttd.org/Compiling_on_Windows_using_Microsoft_Visual_C%2B%2B_2015).
|
||||
|
||||
## All other platforms
|
||||
Minimum required version of CMake is 3.9.
|
||||
By default this produces a Debug build with assertions enabled.
|
||||
This is a far slower build than release builds.
|
||||
You can also build OpenTTD with MSYS2/MinGW-w64 or Cygwin/MinGW using the Makefile. The OpenTTD wiki may provide additional help with [MSYS2](https://wiki.openttd.org/Compiling_on_Windows_using_MSYS2)
|
||||
|
||||
```bash
|
||||
mkdir build
|
||||
cd build
|
||||
cmake ..
|
||||
make
|
||||
```
|
||||
## Linux, Unix, Solaris:
|
||||
|
||||
For more information on how to use CMake (including how to make Release builds),
|
||||
we urge you to read [their excellent manual](https://cmake.org/cmake/help/latest/guide/user-interaction/index.html).
|
||||
OpenTTD can be built with GNU '`make`'. On non-GNU systems it is called '`gmake`'.
|
||||
However, for the first build one has to do a '`./configure`' first.
|
||||
|
||||
## CMake Options
|
||||
The OpenTTD wiki may provide additional help with:
|
||||
|
||||
Via CMake, several options can be set to get different types of
builds; an example invocation follows the list below.
|
||||
- [compiling for Linux and *BSD](https://wiki.openttd.org/Compiling_on_%28GNU/%29Linux_and_*BSD)
|
||||
- [compiling for Solaris](https://wiki.openttd.org/Compiling_on_Solaris)
|
||||
|
||||
- `-DCMAKE_BUILD_TYPE=RelWithDebInfo`: build a release build. This is
|
||||
significantly faster than a debug build, but has far less useful information
|
||||
in case of a crash.
|
||||
- `-DOPTION_DEDICATED=ON`: build OpenTTD without a GUI. Useful if you are
|
||||
running a headless server, as it requires less libraries to operate.
|
||||
- `-DOPTION_USE_ASSERTS=OFF`: disable asserts. Use with care, as assert
|
||||
statements capture early signs of trouble. Release builds have them
|
||||
disabled by default.
|
||||
- `-DOPTION_USE_THREADS=OFF`: disable the use of threads. This will block
|
||||
the interface in many places, and in general gives a worse experience of
|
||||
the game. Use with care.
|
||||
- `-DOPTION_TOOLS_ONLY=ON`: only build tools like `strgen`. Does not build
|
||||
the game itself. Useful for cross-compiling.
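For example, a release build without a GUI (a minimal sketch combining the options above; adjust to your needs) could be configured and compiled like this:

```bash
# Configure a faster, GUI-less build suitable for a headless server, then compile it.
mkdir build && cd build
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DOPTION_DEDICATED=ON
cmake --build .
```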
|
||||
|
||||
## macOS:
|
||||
|
||||
Use '`make`' or Xcode (which will then call make for you)
|
||||
This will give you a binary for your CPU type (PPC/Intel)
|
||||
However, for the first build one has to do a '`./configure`' first.
|
||||
To make a universal binary type '`./configure --enable-universal`'
|
||||
instead of '`./configure`'.
|
||||
|
||||
The OpenTTD wiki may provide additional help with [compiling for macOS](https://wiki.openttd.org/Compiling_on_Mac_OS_X).
|
||||
|
||||
## Haiku:
|
||||
|
||||
Use '`make`', but do a '`./configure`' before the first build.
|
||||
|
||||
The OpenTTD wiki may provide additional help with [compiling for Haiku](https://wiki.openttd.org/Compiling_on_Haiku).
|
||||
|
||||
## OS/2:
|
||||
|
||||
A comprehensive GNU build environment is required to build the OS/2 version.
|
||||
|
||||
The OpenTTD wiki may provide additional help with [compiling for OS/2](https://wiki.openttd.org/Compiling_on_OS/2).
|
||||
|
||||
## Supported compilers
|
||||
|
||||
Every compiler that is supported by CMake and supports C++17 should be
|
||||
able to compile OpenTTD. As the exact list of compilers changes constantly,
|
||||
we refer to the compiler manual to see if it supports C++17, and to CMake
|
||||
to see if it supports your compiler.
|
||||
The following compilers are tested with and known to compile OpenTTD:
|
||||
|
||||
- Microsoft Visual C++ (MSVC) 2015, 2017 and 2019.
|
||||
- GNU Compiler Collection (GCC) 4.8 - 9.
|
||||
- Clang/LLVM 3.9 - 8
|
||||
|
||||
The following compilers are known not to compile OpenTTD:
|
||||
|
||||
In general, this is because these old versions do not (fully) support modern
|
||||
C++11 language features.
|
||||
|
||||
- Microsoft Visual C++ (MSVC) 2013 and earlier.
|
||||
- GNU Compiler Collection (GCC) 4.7 and earlier.
|
||||
- Clang/LLVM 3.8 and earlier.
|
||||
|
||||
If any of these, or any other, compilers can compile OpenTTD, let us know.
|
||||
Pull requests to support more compilers are welcome.
|
||||
|
||||
## Compilation of base sets
|
||||
|
||||
To recompile the extra graphics needed to play with the original Transport
|
||||
Tycoon Deluxe graphics you need GRFCodec (which includes NFORenum) as well.
|
||||
GRFCodec can be found at
|
||||
https://www.openttd.org/downloads/grfcodec-releases/latest.html.
|
||||
GRFCodec can be found at https://www.openttd.org/download-grfcodec.
|
||||
The compilation of these extra graphics generally does not happen, unless
|
||||
you remove the graphics file using '`make maintainer-clean`'.
|
||||
|
||||
Having GRFCodec installed can cause regeneration of the `.grf` files, which
|
||||
are written in the source directory. This can leave your repository in a
|
||||
modified state, as different GRFCodec versions can cause binary differences
|
||||
in the resulting `.grf` files. Also translations might have been added for
|
||||
the base sets which are not yet included in the base set information files.
|
||||
To avoid this behaviour, disable GRFCodec (and NFORenum) in CMake cache
|
||||
(`GRFCODEC_EXECUTABLE` and `NFORENUM_EXECUTABLE`).
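One way to do that from the command line (a sketch under the assumption that forcing the cache entries to `NOTFOUND` is acceptable in your setup):

```bash
# Make CMake treat GRFCodec and NFORenum as not found, so the .grf files
# in the source tree are never regenerated by the build.
cmake .. -DGRFCODEC_EXECUTABLE=GRFCODEC_EXECUTABLE-NOTFOUND \
         -DNFORENUM_EXECUTABLE=NFORENUM_EXECUTABLE-NOTFOUND
```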
|
||||
|
||||
## Developers settings
|
||||
|
||||
You can control some flags directly via `CXXFLAGS` (any combination
|
||||
of these flags will work fine too):
|
||||
|
||||
- `-DRANDOM_DEBUG`: this helps with debugging desyncs.
|
||||
- `-fno-inline`: this avoids creating inline functions; this can make
|
||||
debugging a lot easier.
|
||||
- `-O0`: this disables all optimizations; this can make debugging a
|
||||
lot easier.
|
||||
- `-p`: this enables profiling.
|
||||
|
||||
Always use a clean build folder if you change `CXXFLAGS`, as this
value is otherwise cached. Example use:
|
||||
|
||||
`CXXFLAGS="-fno-inline" cmake ..`
|
||||
Re-compilation of the base sets, thus also use of '`--maintainer-clean`' can
|
||||
leave the repository in a modified state as different grfcodec versions can
|
||||
cause binary differences in the resulting grf. Also translations might have
|
||||
been added for the base sets which are not yet included in the base set
|
||||
information files. Use the configure option '`--without-grfcodec`' to avoid
|
||||
modification of the base set files by the build process.
|
||||
|
@@ -14,7 +14,7 @@ In return, they should reciprocate that respect in addressing your issue or asse
|
||||
The [issue tracker](https://github.com/OpenTTD/OpenTTD/issues) is the preferred channel for [bug reports](#bug-reports), but please respect the following restrictions:
|
||||
|
||||
* Please **do not** use the issue tracker for help playing or using OpenTTD.
|
||||
Please try [irc](https://wiki.openttd.org/en/Development/IRC%20channel), or the [forums](https://www.tt-forums.net/)
|
||||
Please try [irc](https://wiki.openttd.org/IRC_channel), or the [forums](https://www.tt-forums.net/)
|
||||
|
||||
* Please **do not** derail or troll issues. Keep the discussion on topic and respect the opinions of others.
|
||||
|
||||
@@ -23,9 +23,7 @@ Use [GitHub's "reactions" feature](https://github.com/blog/2119-add-reactions-to
|
||||
We reserve the right to delete comments which violate this rule.
|
||||
|
||||
* Please **do not** open issues or pull requests regarding add-on content in NewGRF, GameScripts, AIs, etc.
|
||||
These are created by third-parties. Please try [irc](https://wiki.openttd.org/en/Development/IRC%20channel) or the [forums](https://www.tt-forums.net/) to discuss these.
|
||||
|
||||
* Please use [the web translator](https://translator.openttd.org/) to submit corrections and improvements to translations of the game.
|
||||
These are created by third-parties. Please try [irc](https://wiki.openttd.org/IRC_channel) or the [forums](https://www.tt-forums.net/) to discuss these.
|
||||
|
||||
|
||||
## Bug reports
|
||||
@@ -35,16 +33,16 @@ Good bug reports are extremely helpful, so thanks!
|
||||
|
||||
Guidelines for bug reports:
|
||||
|
||||
0. Please don't report issues with games where you changed NewGRFs mid-game. (This can be verified with the `gamelog` console command in-game.)
|
||||
0. Please don't report issues with games where you changed NewGRFs.
|
||||
|
||||
1. Please don't report issues with modified versions of OpenTTD (patchpacks, unofficial ports, and similar).
|
||||
1. Please don't report issues with modified versions of OpenTTD (patchpacks and similar).
|
||||
|
||||
2. **Use the GitHub issue search** — check if the issue has already been
|
||||
2. **Use the GitHub issue search** --- check if the issue has already been
|
||||
reported.
|
||||
|
||||
3. **Check if the issue has been fixed** — try to reproduce it using the latest `nightly` build of OpenTTD, available from https://www.openttd.org
|
||||
3. **Check if the issue has been fixed** --- try to reproduce it using the latest `nightly` build of OpenTTD, available from https://www.openttd.org
|
||||
|
||||
4. **Isolate the problem** — ideally create reproducible steps with an attached savegame and screenshots. Try to use few or no NewGRFs, AIs etc if possible.
|
||||
4. **Isolate the problem** --- ideally create reproduceable steps with an attached savegame and screenshots. Try to use few or no NewGRFs, AIs etc if possible.
|
||||
|
||||
A good bug report shouldn't leave others needing to chase you up for more information.
|
||||
Please try to be as detailed as possible in your report.
|
||||
@@ -94,7 +92,7 @@ Although we really appreciate feedback and ideas, we will close feature requests
|
||||
|
||||
Many of those ideas etc do have a place on the [forums](https://www.tt-forums.net); and if enough people like it, someone will stand up and make it.
|
||||
|
||||
It's usually best to discuss in [irc](https://wiki.openttd.org/en/Development/IRC%20channel) before opening a feature request or working on a large feature in a fork.
|
||||
It's usually best discuss in [irc](https://wiki.openttd.org/IRC_channel) before opening a feature request or working on a large feature in a fork.
|
||||
Discussion in irc can take time, but it can be productive and avoid disappointment :)
|
||||
|
||||
|
||||
@@ -108,7 +106,7 @@ Pull requests should fit with the [goals of the project](./CONTRIBUTING.md#proje
|
||||
|
||||
Every pull request should have a clear scope, with no unrelated commits.
|
||||
|
||||
[Code style](https://wiki.openttd.org/en/Development/Coding%20style) must be complied with for pull requests to be accepted; this also includes [commit message format](https://wiki.openttd.org/en/Development/Coding%20style#commit-message).
|
||||
[Code style](https://wiki.openttd.org/Coding_style) must be complied with for pull requests to be accepted; this also includes [commit message format](https://wiki.openttd.org/Coding_style#Commit_message).
|
||||
|
||||
Adhering to the following process is the best way to get your work included in the project:
|
||||
|
||||
@@ -136,7 +134,7 @@ contain your feature, change, or fix:
|
||||
git checkout upstream/master -b <topic-branch-name>
|
||||
```
|
||||
|
||||
4. Commit your changes in logical chunks. Please adhere to these [git commit message guidelines](https://wiki.openttd.org/en/Development/Coding%20style#commit-message) or your code is unlikely to be merged into the main project.
|
||||
4. Commit your changes in logical chunks. Please adhere to these [git commit message guidelines](https://wiki.openttd.org/Commit_style#Commit_message) or your code is unlikely to be merged into the main project.
|
||||
Use Git's [interactive rebase](https://help.github.com/articles/interactive-rebase) feature to tidy up your commits before making them public.
|
||||
|
||||
5. Locally rebase the upstream development branch into your topic branch:
|
||||
@@ -172,14 +170,14 @@ The results of the CI tests will show on your pull request.
|
||||
By clicking on Details you can further zoom in; in case of a failure it will show you why it failed.
|
||||
In case of success it will report how awesome you were.
|
||||
|
||||
Tip: [commit message format](https://wiki.openttd.org/en/Development/Coding%20style#commit-message) is a common reason for pull requests to fail validation.
|
||||
Tip: [commit message format](https://wiki.openttd.org/Coding_style#Commit_message) is a common reason for pull requests to fail validation.
|
||||
|
||||
|
||||
### Are there any development docs?
|
||||
|
||||
There is no single source for OpenTTD development docs. It's a complex project with a long history, and multiple APIs.
|
||||
|
||||
A good entry point is [Development](https://wiki.openttd.org/en/Development/) on the OpenTTD wiki; this provides links to wiki documentation and other sources.
|
||||
A good entry point is [Development](https://wiki.openttd.org/Development) on the OpenTTD wiki; this provides links to wiki documentation and other sources.
|
||||
|
||||
The GitHub repo also includes some non-comprehensive documentation in [/docs](./docs).
|
||||
|
||||
|
@@ -1,12 +0,0 @@
|
||||
# Make the current version available to CPack
|
||||
set(CPACK_PACKAGE_VERSION "@REV_VERSION@")
|
||||
|
||||
# Name the output file with the correct version
|
||||
string(REPLACE "#CPACK_PACKAGE_VERSION#" "@REV_VERSION@" CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_FILE_NAME}")
|
||||
|
||||
if (CPACK_BUNDLE_PLIST_SOURCE)
|
||||
# Rewrite the Info.plist.in to contain the correct version
|
||||
file(READ ${CPACK_BUNDLE_PLIST_SOURCE} INFO_PLIST_CONTENT)
|
||||
string(REPLACE "#CPACK_PACKAGE_VERSION#" "@REV_VERSION@" INFO_PLIST_CONTENT "${INFO_PLIST_CONTENT}")
|
||||
file(WRITE ${CPACK_BUNDLE_PLIST} "${INFO_PLIST_CONTENT}")
|
||||
endif (CPACK_BUNDLE_PLIST_SOURCE)
|
@@ -14,6 +14,7 @@
|
||||
- Ingo von Borstel (planetmaker) - General coding, Support (since 1.1)
|
||||
- Remko Bijker (Rubidium) - Lead coder and way more (since 0.4.5)
|
||||
- José Soler (Terkhen) - General coding (since 1.0)
|
||||
- Leif Linse (Zuu) - AI/Game Script (since 1.2)
|
||||
|
||||
### Inactive Developers:
|
||||
|
||||
@@ -27,7 +28,6 @@
|
||||
- Christoph Mallon (Tron) - Programmer, code correctness police (0.3 - 0.5)
|
||||
- Patric Stout (TrueBrain) - NoProgrammer (0.3 - 1.2), sys op (active)
|
||||
- Thijs Marinussen (Yexo) - AI Framework, General (0.6 - 1.3)
|
||||
- Leif Linse (Zuu) - AI/Game Script (1.2 - 1.6)
|
||||
|
||||
### Retired Developers:
|
||||
|
||||
|
@@ -8,10 +8,10 @@
|
||||
#---------------------------------------------------------------------------
|
||||
DOXYFILE_ENCODING = UTF-8
|
||||
PROJECT_NAME = "OpenTTD Source"
|
||||
PROJECT_NUMBER = ${REV_VERSION}
|
||||
PROJECT_NUMBER = $(VERSION)
|
||||
PROJECT_BRIEF =
|
||||
PROJECT_LOGO =
|
||||
OUTPUT_DIRECTORY = ${CPACK_BINARY_DIR}/docs/source/
|
||||
OUTPUT_DIRECTORY = docs/source/
|
||||
CREATE_SUBDIRS = YES
|
||||
ALLOW_UNICODE_NAMES = NO
|
||||
OUTPUT_LANGUAGE = English
|
||||
@@ -306,14 +306,16 @@ SKIP_FUNCTION_MACROS = YES
|
||||
# Configuration options related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
TAGFILES =
|
||||
GENERATE_TAGFILE = ${CPACK_BINARY_DIR}/docs/openttd.tag
|
||||
GENERATE_TAGFILE = objs/openttd.tag
|
||||
ALLEXTERNALS = NO
|
||||
EXTERNAL_GROUPS = YES
|
||||
EXTERNAL_PAGES = YES
|
||||
PERL_PATH = /usr/bin/perl
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
CLASS_DIAGRAMS = YES
|
||||
MSCGEN_PATH =
|
||||
DIA_PATH =
|
||||
HIDE_UNDOC_RELATIONS = YES
|
||||
HAVE_DOT = NO
|
Makefile.bundle.in
@@ -0,0 +1,221 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#
|
||||
# Creation of bundles
|
||||
#
|
||||
|
||||
# The revision is needed for the bundle name and creating an OSX application bundle.
|
||||
# Detect the revision
|
||||
VERSIONS := $(shell AWK="$(AWK)" "$(ROOT_DIR)/findversion.sh")
|
||||
VERSION := $(shell echo "$(VERSIONS)" | cut -f 1 -d' ')
|
||||
|
||||
# Make sure we have something in VERSION
|
||||
ifeq ($(VERSION),)
|
||||
VERSION := norev000
|
||||
endif
|
||||
|
||||
ifndef BUNDLE_NAME
|
||||
BUNDLE_NAME = openttd-custom-$(VERSION)-$(OS)
|
||||
endif
|
||||
|
||||
# An OSX application bundle needs the data files, lang files and openttd executable in a different location.
|
||||
ifdef OSXAPP
|
||||
AI_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/ai
|
||||
GAME_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/game
|
||||
BASESET_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/baseset
|
||||
LANG_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/lang
|
||||
TTD_DIR = $(BUNDLE_DIR)/$(OSXAPP)/Contents/MacOS
|
||||
else
|
||||
AI_DIR = $(BUNDLE_DIR)/ai
|
||||
GAME_DIR = $(BUNDLE_DIR)/game
|
||||
BASESET_DIR = $(BUNDLE_DIR)/baseset
|
||||
LANG_DIR = $(BUNDLE_DIR)/lang
|
||||
TTD_DIR = $(BUNDLE_DIR)
|
||||
endif
|
||||
|
||||
bundle: all
|
||||
@echo '[BUNDLE] Constructing bundle'
|
||||
$(Q)rm -rf "$(BUNDLE_DIR)"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/docs"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/media"
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/scripts"
|
||||
$(Q)mkdir -p "$(TTD_DIR)"
|
||||
$(Q)mkdir -p "$(AI_DIR)"
|
||||
$(Q)mkdir -p "$(GAME_DIR)"
|
||||
$(Q)mkdir -p "$(BASESET_DIR)"
|
||||
$(Q)mkdir -p "$(LANG_DIR)"
|
||||
ifdef OSXAPP
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources"
|
||||
$(Q)echo "APPL????" > "$(BUNDLE_DIR)/$(OSXAPP)/Contents/PkgInfo"
|
||||
$(Q)cp "$(ROOT_DIR)/os/macosx/openttd.icns" "$(BUNDLE_DIR)/$(OSXAPP)/Contents/Resources/openttd.icns"
|
||||
$(Q)$(ROOT_DIR)/os/macosx/plistgen.sh "$(BUNDLE_DIR)/$(OSXAPP)" "$(VERSION)"
|
||||
$(Q)cp "$(ROOT_DIR)/os/macosx/splash.png" "$(BASESET_DIR)"
|
||||
endif
|
||||
ifeq ($(OS),UNIX)
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd.32.bmp" "$(BASESET_DIR)/"
|
||||
endif
|
||||
$(Q)cp "$(BIN_DIR)/$(TTD)" "$(TTD_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/ai/"compat_*.nut "$(AI_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/game/"compat_*.nut "$(GAME_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.grf "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.obg "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.obs "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/opntitle.dat" "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/baseset/"*.obm "$(BASESET_DIR)/"
|
||||
$(Q)cp "$(BIN_DIR)/lang/"*.lng "$(LANG_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/README.md" "$(BUNDLE_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/COPYING.md" "$(BUNDLE_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/known-bugs.txt" "$(BUNDLE_DIR)/"
|
||||
$(Q)cp "$(ROOT_DIR)/docs/multiplayer.md" "$(BUNDLE_DIR)/docs/"
|
||||
$(Q)cp "$(ROOT_DIR)/changelog.txt" "$(BUNDLE_DIR)/"
|
||||
ifdef MAN_DIR
|
||||
$(Q)mkdir -p "$(BUNDLE_DIR)/man/"
|
||||
$(Q)cp "$(ROOT_DIR)/docs/openttd.6" "$(BUNDLE_DIR)/man/"
|
||||
$(Q)gzip -9 "$(BUNDLE_DIR)/man/openttd.6"
|
||||
endif
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd.32.xpm" "$(BUNDLE_DIR)/media/"
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd."*.png "$(BUNDLE_DIR)/media/"
|
||||
$(Q)cp "$(BIN_DIR)/scripts/"* "$(BUNDLE_DIR)/scripts/"
|
||||
ifdef MENU_DIR
|
||||
$(Q)cp "$(ROOT_DIR)/media/openttd.desktop" "$(BUNDLE_DIR)/media/"
|
||||
$(Q)$(AWK) -f "$(ROOT_DIR)/media/openttd.desktop.translation.awk" "$(SRC_DIR)/lang/"*.txt | LC_ALL=C $(SORT) | $(AWK) -f "$(ROOT_DIR)/media/openttd.desktop.filter.awk" >> "$(BUNDLE_DIR)/media/openttd.desktop"
|
||||
$(Q)sed s/=openttd/=$(BINARY_NAME)/g "$(BUNDLE_DIR)/media/openttd.desktop" > "$(ROOT_DIR)/media/openttd.desktop.install"
|
||||
endif
|
||||
ifeq ($(TTD), openttd.exe)
|
||||
$(Q)unix2dos "$(BUNDLE_DIR)/docs/"* "$(BUNDLE_DIR)/README.md" "$(BUNDLE_DIR)/COPYING.md" "$(BUNDLE_DIR)/changelog.txt" "$(BUNDLE_DIR)/known-bugs.txt"
|
||||
endif
|
||||
|
||||
### Packing the current bundle into several compressed file formats ###
|
||||
#
|
||||
# Zips & dmgs do not contain a root folder, i.e. they have files in the root of the zip/dmg.
|
||||
# gzip, bzip2 and lha archives have a root folder, with the same name as the bundle.
|
||||
#
|
||||
# One can supply a custom name by adding BUNDLE_NAME:=<name> to the make command.
|
||||
#
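# For example (an illustrative invocation, not part of the original file):
#   make bundle_zip BUNDLE_NAME=openttd-custom-mybuild
# would create "openttd-custom-mybuild.zip" in BUNDLES_DIR.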
|
||||
bundle_zip: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).zip'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)cd "$(BUNDLE_DIR)" && zip -r $(shell if test -z "$(VERBOSE)"; then echo '-q'; fi) "$(BUNDLES_DIR)/$(BUNDLE_NAME).zip" .
|
||||
|
||||
bundle_7z: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).7z'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)cd "$(BUNDLE_DIR)" && 7z a "$(BUNDLES_DIR)/$(BUNDLE_NAME).7z" .
|
||||
|
||||
bundle_gzip: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.gz'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.gzip/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.gzip/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.gzip" && tar -zc$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.gz" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.gzip"
|
||||
|
||||
bundle_bzip2: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.bz2'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.bzip2/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.bzip2/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.bzip2" && tar -jc$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.bz2" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.bzip2"
|
||||
|
||||
bundle_lzma: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.lzma'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.lzma/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.lzma/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.lzma" && tar --lzma -c$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.lzma" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.lzma"
|
||||
|
||||
bundle_xz: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).tar.xz'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.xz/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.xz/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.xz" && tar --xz -c$(shell if test -n "$(VERBOSE)"; then echo 'v'; fi)f "$(BUNDLES_DIR)/$(BUNDLE_NAME).tar.xz" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.xz"
|
||||
|
||||
bundle_lha: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).lha'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/.lha/$(BUNDLE_NAME)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/"* "$(BUNDLES_DIR)/.lha/$(BUNDLE_NAME)/"
|
||||
$(Q)cd "$(BUNDLES_DIR)/.lha" && lha ao6 "$(BUNDLES_DIR)/$(BUNDLE_NAME).lha" "$(BUNDLE_NAME)"
|
||||
$(Q)rm -rf "$(BUNDLES_DIR)/.lha"
|
||||
|
||||
bundle_dmg: bundle
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).dmg'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)/OpenTTD $(VERSION)"
|
||||
$(Q)cp -R "$(BUNDLE_DIR)/" "$(BUNDLES_DIR)/OpenTTD $(VERSION)"
|
||||
$(Q)hdiutil create -ov -format UDZO -srcfolder "$(BUNDLES_DIR)/OpenTTD $(VERSION)" "$(BUNDLES_DIR)/$(BUNDLE_NAME).dmg"
|
||||
$(Q)rm -fr "$(BUNDLES_DIR)/OpenTTD $(VERSION)"
|
||||
|
||||
bundle_exe: all
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).exe'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)unix2dos "$(ROOT_DIR)/docs/"* "$(ROOT_DIR)/README.md" "$(ROOT_DIR)/COPYING.md" "$(ROOT_DIR)/changelog.txt" "$(ROOT_DIR)/known-bugs.txt"
|
||||
$(Q)cd $(ROOT_DIR)/os/windows/installer && makensis.exe //DVERSION_INCLUDE=version_$(PLATFORM).txt install.nsi
|
||||
$(Q)mv $(ROOT_DIR)/os/windows/installer/*$(PLATFORM).exe "$(BUNDLES_DIR)/$(BUNDLE_NAME).exe"
|
||||
|
||||
ifdef OSXAPP
|
||||
install:
|
||||
@echo '[INSTALL] Cannot install the OSX Application Bundle'
|
||||
else
|
||||
install: bundle
|
||||
@echo '[INSTALL] Installing OpenTTD'
|
||||
$(Q)install -d "$(INSTALL_BINARY_DIR)"
|
||||
$(Q)install -d "$(INSTALL_ICON_DIR)"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/ai"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/game"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/baseset"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/lang"
|
||||
$(Q)install -d "$(INSTALL_DATA_DIR)/scripts"
|
||||
ifeq ($(TTD), openttd.exe)
|
||||
$(Q)install -m 755 "$(BUNDLE_DIR)/$(TTD)" "$(INSTALL_BINARY_DIR)/${BINARY_NAME}.exe"
|
||||
else
|
||||
$(Q)install -m 755 "$(BUNDLE_DIR)/$(TTD)" "$(INSTALL_BINARY_DIR)/${BINARY_NAME}"
|
||||
endif
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/lang/"* "$(INSTALL_DATA_DIR)/lang"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/ai/"* "$(INSTALL_DATA_DIR)/ai"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/game/"* "$(INSTALL_DATA_DIR)/game"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/baseset/"* "$(INSTALL_DATA_DIR)/baseset"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/scripts/"* "$(INSTALL_DATA_DIR)/scripts"
|
||||
ifndef DO_NOT_INSTALL_DOCS
|
||||
$(Q)install -d "$(INSTALL_DOC_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/docs/"* "$(BUNDLE_DIR)/README.md" "$(BUNDLE_DIR)/known-bugs.txt" "$(INSTALL_DOC_DIR)"
|
||||
endif
|
||||
ifndef DO_NOT_INSTALL_CHANGELOG
|
||||
$(Q)install -d "$(INSTALL_DOC_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/changelog.txt" "$(INSTALL_DOC_DIR)"
|
||||
endif
|
||||
ifndef DO_NOT_INSTALL_LICENSE
|
||||
$(Q)install -d "$(INSTALL_DOC_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/COPYING.md" "$(INSTALL_DOC_DIR)"
|
||||
endif
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.32.xpm" "$(INSTALL_ICON_DIR)/${BINARY_NAME}.32.xpm"
|
||||
ifdef ICON_THEME_DIR
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/16x16/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.16.png" "$(INSTALL_ICON_THEME_DIR)/16x16/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/32x32/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.32.png" "$(INSTALL_ICON_THEME_DIR)/32x32/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/48x48/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.48.png" "$(INSTALL_ICON_THEME_DIR)/48x48/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/64x64/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.64.png" "$(INSTALL_ICON_THEME_DIR)/64x64/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/128x128/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.128.png" "$(INSTALL_ICON_THEME_DIR)/128x128/apps/${BINARY_NAME}.png"
|
||||
$(Q)install -d "$(INSTALL_ICON_THEME_DIR)/256x256/apps"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/openttd.256.png" "$(INSTALL_ICON_THEME_DIR)/256x256/apps/${BINARY_NAME}.png"
|
||||
else
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/media/"*.png "$(INSTALL_ICON_DIR)"
|
||||
endif
|
||||
ifdef MAN_DIR
|
||||
ifndef DO_NOT_INSTALL_MAN
|
||||
$(Q)install -d "$(INSTALL_MAN_DIR)"
|
||||
$(Q)install -m 644 "$(BUNDLE_DIR)/man/openttd.6.gz" "$(INSTALL_MAN_DIR)/${BINARY_NAME}.6.gz"
|
||||
endif
|
||||
endif
|
||||
ifdef MENU_DIR
|
||||
$(Q)install -d "$(INSTALL_MENU_DIR)"
|
||||
$(Q)install -m 644 "$(ROOT_DIR)/media/openttd.desktop.install" "$(INSTALL_MENU_DIR)/${BINARY_NAME}.desktop"
|
||||
endif
|
||||
endif # OSXAPP
|
116
Makefile.grf.in
Normal file
@@ -0,0 +1,116 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# Building requires GRFCodec.
|
||||
#
|
||||
# Recent versions (including sources) can be found at:
|
||||
# http://www.openttd.org/download-grfcodec
|
||||
#
|
||||
# The mercurial repository can be found at:
|
||||
# http://hg.openttdcoop.org/grfcodec
|
||||
#
|
||||
|
||||
|
||||
ROOT_DIR = !!ROOT_DIR!!
|
||||
GRF_DIR = $(ROOT_DIR)/media/extra_grf
|
||||
BASESET_DIR = $(ROOT_DIR)/media/baseset
|
||||
LANG_DIR = $(ROOT_DIR)/src/lang
|
||||
BIN_DIR = !!BIN_DIR!!/baseset
|
||||
OBJS_DIR = !!GRF_OBJS_DIR!!
|
||||
OS = !!OS!!
|
||||
STAGE = !!STAGE!!
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
GRFCODEC := !!GRFCODEC!!
|
||||
NFORENUM := !!NFORENUM!!
|
||||
CC_BUILD := !!CC_BUILD!!
|
||||
MD5SUM := $(shell [ "$(OS)" = "OSX" ] && echo "md5 -r" || echo "md5sum")
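On macOS the BSD md5 tool is invoked with -r so that its output lines up with GNU md5sum's "digest filename" layout; a quick sketch (digest elided):

    $ md5sum orig_extra.grf      # GNU coreutils
    <hex digest>  orig_extra.grf
    $ md5 -r orig_extra.grf      # macOS / BSD
    <hex digest> orig_extra.grf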
|
||||
|
||||
# Some "should not be changed" settings.
|
||||
NFO_FILES := $(GRF_DIR)/*.nfo $(GRF_DIR)/rivers/*.nfo
|
||||
PNG_FILES := $(GRF_DIR)/*.png $(GRF_DIR)/rivers/*.png
|
||||
|
||||
# List of target files.
|
||||
OBT_FILES := $(BIN_DIR)/orig_dos.obg
|
||||
OBT_FILES += $(BIN_DIR)/orig_dos_de.obg
|
||||
OBT_FILES += $(BIN_DIR)/orig_win.obg
|
||||
OBT_FILES += $(BIN_DIR)/orig_dos.obs
|
||||
OBT_FILES += $(BIN_DIR)/orig_win.obs
|
||||
OBT_FILES += $(BIN_DIR)/no_sound.obs
|
||||
OBT_FILES += $(BIN_DIR)/orig_dos.obm
|
||||
OBT_FILES += $(BIN_DIR)/orig_win.obm
|
||||
OBT_FILES += $(BIN_DIR)/no_music.obm
|
||||
OBT_FILES += $(BIN_DIR)/orig_tto.obm
|
||||
|
||||
# Build the GRF.
|
||||
all: $(OBT_FILES)
|
||||
ifdef GRFCODEC
|
||||
all: $(BIN_DIR)/openttd.grf $(BIN_DIR)/orig_extra.grf
|
||||
endif
|
||||
|
||||
$(OBJS_DIR)/langfiles.tmp: $(LANG_DIR)/*.txt
|
||||
$(E) '$(STAGE) Collecting baseset translations'
|
||||
$(Q) cat $^ > $@
|
||||
|
||||
$(BIN_DIR)/%.obg: $(BASESET_DIR)/%.obg $(BIN_DIR)/orig_extra.grf $(OBJS_DIR)/langfiles.tmp $(BASESET_DIR)/translations.awk
|
||||
$(E) '$(STAGE) Updating $(notdir $@)'
|
||||
$(Q) sed 's/^ORIG_EXTRA.GRF = *[0-9a-f]*$$/ORIG_EXTRA.GRF = '`$(MD5SUM) $(BIN_DIR)/orig_extra.grf | sed 's@ .*@@'`'/' $< > $@.tmp
|
||||
$(Q) awk -v langfiles='$(OBJS_DIR)/langfiles.tmp' -f $(BASESET_DIR)/translations.awk $@.tmp >$@
|
||||
$(Q) rm $@.tmp
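In other words, the %.obg rule above recomputes the checksum of the freshly built orig_extra.grf and splices it into the baseset metadata before the translations are merged in. A hedged before/after sketch; the metadata line and digest are illustrative, not copied from the real .obg files:

    # before the rule runs (placeholder value)
    ORIG_EXTRA.GRF = 0
    # after the rule runs
    ORIG_EXTRA.GRF = <md5 of $(BIN_DIR)/orig_extra.grf>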
|
||||
|
||||
$(BIN_DIR)/%.obs: $(BASESET_DIR)/%.obs $(OBJS_DIR)/langfiles.tmp $(BASESET_DIR)/translations.awk
|
||||
$(E) '$(STAGE) Updating $(notdir $@)'
|
||||
$(Q) awk -v langfiles='$(OBJS_DIR)/langfiles.tmp' -f $(BASESET_DIR)/translations.awk $< >$@
|
||||
|
||||
$(BIN_DIR)/%.obm: $(BASESET_DIR)/%.obm $(OBJS_DIR)/langfiles.tmp $(BASESET_DIR)/translations.awk
|
||||
$(E) '$(STAGE) Updating $(notdir $@)'
|
||||
$(Q) awk -v langfiles='$(OBJS_DIR)/langfiles.tmp' -f $(BASESET_DIR)/translations.awk $< >$@
|
||||
|
||||
# Guard against trying to run GRFCODEC/NFORENUM when either of them is not set.
|
||||
ifdef GRFCODEC
|
||||
ifdef NFORENUM
|
||||
|
||||
# Compile extra grf
|
||||
$(BIN_DIR)/openttd.grf: $(PNG_FILES) $(NFO_FILES) $(GRF_DIR)/assemble_nfo.awk
|
||||
$(E) '$(STAGE) Assembling openttd.nfo'
|
||||
$(Q)-mkdir -p $(OBJS_DIR)/sprites
|
||||
$(Q)-cp $(PNG_FILES) $(OBJS_DIR)/sprites 2> /dev/null
|
||||
$(Q) awk -f $(GRF_DIR)/assemble_nfo.awk $(GRF_DIR)/openttd.nfo > $(OBJS_DIR)/sprites/openttd.nfo
|
||||
$(Q) $(NFORENUM) -s $(OBJS_DIR)/sprites/openttd.nfo
|
||||
$(E) '$(STAGE) Compiling openttd.grf'
|
||||
$(Q) $(GRFCODEC) -n -s -e -p1 $(OBJS_DIR)/openttd.grf
|
||||
$(Q)cp $(OBJS_DIR)/openttd.grf $(BIN_DIR)/openttd.grf
|
||||
|
||||
# The copy operation of PNG_FILES is duplicated from the target 'openttd.grf', so those two targets must not run in parallel.
|
||||
$(BIN_DIR)/orig_extra.grf: $(PNG_FILES) $(NFO_FILES) $(GRF_DIR)/assemble_nfo.awk | $(BIN_DIR)/openttd.grf
|
||||
$(E) '$(STAGE) Assembling orig_extra.nfo'
|
||||
$(Q)-mkdir -p $(OBJS_DIR)/sprites
|
||||
$(Q)-cp $(PNG_FILES) $(OBJS_DIR)/sprites 2> /dev/null
|
||||
$(Q) awk -f $(GRF_DIR)/assemble_nfo.awk $(GRF_DIR)/orig_extra.nfo > $(OBJS_DIR)/sprites/orig_extra.nfo
|
||||
$(Q) $(NFORENUM) -s $(OBJS_DIR)/sprites/orig_extra.nfo
|
||||
$(E) '$(STAGE) Compiling orig_extra.grf'
|
||||
$(Q) $(GRFCODEC) -n -s -e -p1 $(OBJS_DIR)/orig_extra.grf
|
||||
$(Q)cp $(OBJS_DIR)/orig_extra.grf $(BIN_DIR)/orig_extra.grf
|
||||
|
||||
endif
|
||||
endif
|
||||
|
||||
# Clean up temporary files.
|
||||
clean:
|
||||
$(Q)rm -f *.bak *.grf $(OBT_FILES)
|
||||
|
||||
# Clean up temporary files
|
||||
mrproper: clean
|
||||
$(Q)rm -fr sprites
|
||||
|
||||
.PHONY: all mrproper depend clean
|
185
Makefile.in
Normal file
@@ -0,0 +1,185 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
else
|
||||
Q = @
|
||||
endif
|
||||
|
||||
include Makefile.am
|
||||
|
||||
CONFIG_CACHE_PWD = !!CONFIG_CACHE_PWD!!
|
||||
CONFIG_CACHE_SOURCE_LIST = !!CONFIG_CACHE_SOURCE_LIST!!
|
||||
BIN_DIR = !!BIN_DIR!!
|
||||
ICON_THEME_DIR = !!ICON_THEME_DIR!!
|
||||
MAN_DIR = !!MAN_DIR!!
|
||||
MENU_DIR = !!MENU_DIR!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
ROOT_DIR = !!ROOT_DIR!!
|
||||
BUNDLE_DIR = "$(ROOT_DIR)/bundle"
|
||||
BUNDLES_DIR = "$(ROOT_DIR)/bundles"
|
||||
INSTALL_DIR = !!INSTALL_DIR!!
|
||||
INSTALL_BINARY_DIR = "$(INSTALL_DIR)/"!!BINARY_DIR!!
|
||||
INSTALL_MAN_DIR = "$(INSTALL_DIR)/$(MAN_DIR)"
|
||||
INSTALL_MENU_DIR = "$(INSTALL_DIR)/$(MENU_DIR)"
|
||||
INSTALL_ICON_DIR = "$(INSTALL_DIR)/"!!ICON_DIR!!
|
||||
INSTALL_ICON_THEME_DIR = "$(INSTALL_DIR)/$(ICON_THEME_DIR)"
|
||||
INSTALL_DATA_DIR = "$(INSTALL_DIR)/"!!DATA_DIR!!
|
||||
INSTALL_DOC_DIR = "$(INSTALL_DIR)/"!!DOC_DIR!!
|
||||
SOURCE_LIST = !!SOURCE_LIST!!
|
||||
CONFIGURE_FILES = !!CONFIGURE_FILES!!
|
||||
BINARY_NAME = !!BINARY_NAME!!
|
||||
STRIP = !!STRIP!!
|
||||
TTD = !!TTD!!
|
||||
TTDS = $(SRC_DIRS:%=%/$(TTD))
|
||||
OS = !!OS!!
|
||||
OSXAPP = !!OSXAPP!!
|
||||
LIPO = !!LIPO!!
|
||||
AWK = !!AWK!!
|
||||
SORT = !!SORT!!
|
||||
DISTCC = !!DISTCC!!
|
||||
|
||||
RES := $(shell if [ ! -f $(CONFIG_CACHE_PWD) ] || [ "`pwd`" != "`cat $(CONFIG_CACHE_PWD)`" ]; then echo "`pwd`" > $(CONFIG_CACHE_PWD); fi )
|
||||
RES := $(shell if [ ! -f $(CONFIG_CACHE_SOURCE_LIST) ] || [ -n "`cmp $(CONFIG_CACHE_SOURCE_LIST) $(SOURCE_LIST) 2>/dev/null`" ]; then cp $(SOURCE_LIST) $(CONFIG_CACHE_SOURCE_LIST); fi )
|
||||
|
||||
all: config.pwd config.cache
|
||||
ifdef DISTCC
|
||||
@if [ -z "`echo '$(MFLAGS)' | grep '\-j'`" ]; then echo; echo "WARNING: you enabled distcc support, but you don't seem to be using the -jN parameter"; echo; fi
|
||||
endif
|
||||
@for dir in $(DIRS); do \
|
||||
$(MAKE) -C $$dir all || exit 1; \
|
||||
done
|
||||
ifdef LIPO
|
||||
# Lipo is an OSX thing. If it is defined, it means we are building for universal,
|
||||
# and so we have to combine the binaries into one big binary
|
||||
|
||||
# Remove the last binary made by the last compiled target
|
||||
$(Q)rm -f $(BIN_DIR)/$(TTD)
|
||||
# Make all the binaries into one
|
||||
$(Q)$(LIPO) -create -output $(BIN_DIR)/$(TTD) $(TTDS)
|
||||
endif
|
||||
|
||||
help:
|
||||
@echo "Available make commands:"
|
||||
@echo ""
|
||||
@echo "Compilation:"
|
||||
@echo " all compile the executable and the lang files"
|
||||
@echo " lang compile the lang files only"
|
||||
@echo "Clean up:"
|
||||
@echo " clean remove the files generated during compilation"
|
||||
@echo " mrproper remove the files generated during configuration and compilation"
|
||||
@echo "Run after compilation:"
|
||||
@echo " run execute openttd after the compilation"
|
||||
@echo " run-gdb execute openttd in debug mode after the compilation"
|
||||
@echo " run-prof execute openttd in profiling mode after the compilation"
|
||||
@echo "Installation:"
|
||||
@echo " install install the compiled files and the data-files after the compilation"
|
||||
@echo " bundle create the base for an installation bundle"
|
||||
@echo " bundle_zip create the zip installation bundle"
|
||||
@echo " bundle_gzip create the gzip installation bundle"
|
||||
@echo " bundle_bzip2 create the bzip2 installation bundle"
|
||||
@echo " bundle_lha create the lha installation bundle"
|
||||
@echo " bundle_dmg create the dmg installation bundle"
|
||||
|
||||
config.pwd: $(CONFIG_CACHE_PWD)
|
||||
$(MAKE) reconfigure
|
||||
|
||||
config.cache: $(CONFIG_CACHE_SOURCE_LIST) $(CONFIGURE_FILES)
|
||||
$(MAKE) reconfigure
|
||||
|
||||
reconfigure:
|
||||
ifeq ($(shell if test -f config.cache; then echo 1; fi), 1)
|
||||
@echo "----------------"
|
||||
@echo "The system detected that source.list or any configure file is altered."
|
||||
@echo " Going to reconfigure with last known settings..."
|
||||
@echo "----------------"
|
||||
# Make sure we don't lock config.cache
|
||||
@$(shell cat config.cache | sed 's@\\ @\\\\ @g') || exit 1
|
||||
@echo "----------------"
|
||||
@echo "Reconfig done. Please re-execute make."
|
||||
@echo "----------------"
|
||||
else
|
||||
@echo "----------------"
|
||||
@echo "Have not found a configuration, please run configure first."
|
||||
@echo "----------------"
|
||||
@exit 1
|
||||
endif
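The reconfigure target works because config.cache is assumed to hold the original configure invocation; the sed merely doubles backslash-escaped spaces, presumably so the cached command survives being re-expanded by make. A minimal sketch, with a hypothetical cached command:

    $ cat config.cache
    ./configure --enable-debug=1
    $ make reconfigure     # effectively re-runs the cached ./configure line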
|
||||
|
||||
clean:
|
||||
@for dir in $(DIRS); do \
|
||||
$(MAKE) -C $$dir clean; \
|
||||
done
|
||||
$(Q)rm -rf $(BUNDLE_TARGET)
|
||||
|
||||
lang:
|
||||
@for dir in $(LANG_DIRS); do \
|
||||
$(MAKE) -C $$dir all; \
|
||||
done
|
||||
|
||||
mrproper:
|
||||
@for dir in $(DIRS); do \
|
||||
$(MAKE) -C $$dir mrproper; \
|
||||
done
|
||||
# Don't be tempted to merge these two for loops. Doing that breaks make
|
||||
# --dry-run, since make has this "feature" that it always runs commands
|
||||
# containing $(MAKE), even when --dry-run is passed. The objective is of
|
||||
# course to also get a dry-run of submakes, but make is not smart enough
|
||||
# to see that a for loop runs both a submake and an actual command.
|
||||
@for dir in $(DIRS); do \
|
||||
rm -f $$dir/Makefile; \
|
||||
done
|
||||
$(Q)rm -rf objs
|
||||
$(Q)rm -f Makefile Makefile.am Makefile.bundle
|
||||
$(Q)rm -f media/openttd.desktop media/openttd.desktop.install
|
||||
$(Q)rm -f $(CONFIG_CACHE_SOURCE_LIST) config.cache config.pwd config.log $(CONFIG_CACHE_PWD)
|
||||
# directories for bundle generation
|
||||
$(Q)rm -rf $(BUNDLE_DIR)
|
||||
$(Q)rm -rf $(BUNDLES_DIR)
|
||||
# output of profiling
|
||||
$(Q)rm -f $(BIN_DIR)/gmon.out
|
||||
# output of generating 'API' documentation
|
||||
$(Q)rm -rf $(ROOT_DIR)/docs/source
|
||||
$(Q)rm -rf $(ROOT_DIR)/docs/aidocs
|
||||
$(Q)rm -rf $(ROOT_DIR)/docs/gamedocs
|
||||
# directories created by OpenTTD on regression testing
|
||||
$(Q)rm -rf $(BIN_DIR)/ai/regression/content_download $(BIN_DIR)/ai/regression/save $(BIN_DIR)/ai/regression/scenario
|
||||
distclean: mrproper
|
||||
|
||||
maintainer-clean: distclean
|
||||
$(Q)rm -f $(BIN_DIR)/baseset/openttd.grf $(BIN_DIR)/baseset/orig_extra.grf $(BIN_DIR)/baseset/*.obg $(BIN_DIR)/baseset/*.obs $(BIN_DIR)/baseset/*.obm
|
||||
|
||||
depend:
|
||||
@for dir in $(SRC_DIRS); do \
|
||||
$(MAKE) -C $$dir depend; \
|
||||
done
|
||||
|
||||
run: all
|
||||
$(Q)cd !!BIN_DIR!! && ./!!TTD!! $(OPENTTD_ARGS)
|
||||
|
||||
run-gdb: all
|
||||
$(Q)cd !!BIN_DIR!! && gdb --ex run --args ./!!TTD!! $(OPENTTD_ARGS)
|
||||
|
||||
run-prof: all
|
||||
$(Q)cd !!BIN_DIR!! && ./!!TTD!! $(OPENTTD_ARGS) && gprof !!TTD!! | less
|
||||
|
||||
regression: all
|
||||
$(Q)cd !!BIN_DIR!! && sh ai/regression/run.sh
|
||||
test: regression
|
||||
|
||||
%.o:
|
||||
@for dir in $(SRC_DIRS); do \
|
||||
$(MAKE) -C $$dir $(@:src/%=%); \
|
||||
done
|
||||
|
||||
%.lng:
|
||||
@for dir in $(LANG_DIRS); do \
|
||||
$(MAKE) -C $$dir $@; \
|
||||
done
|
||||
|
||||
.PHONY: test distclean mrproper clean
|
||||
|
||||
include Makefile.bundle
|
87
Makefile.lang.in
Normal file
@@ -0,0 +1,87 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
STRGEN = !!STRGEN!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
LANG_DIR = !!LANG_DIR!!
|
||||
BIN_DIR = !!BIN_DIR!!
|
||||
LANGS_SRC = $(shell ls $(LANG_DIR)/*.txt)
|
||||
LANGS = $(LANGS_SRC:$(LANG_DIR)/%.txt=%.lng)
|
||||
CXX_BUILD = !!CXX_BUILD!!
|
||||
CFLAGS_BUILD = !!CFLAGS_BUILD!!
|
||||
CXXFLAGS_BUILD= !!CXXFLAGS_BUILD!!
|
||||
LDFLAGS_BUILD = !!LDFLAGS_BUILD!!
|
||||
STRGEN_FLAGS = !!STRGEN_FLAGS!!
|
||||
STAGE = !!STAGE!!
|
||||
LANG_SUPPRESS = !!LANG_SUPPRESS!!
|
||||
LANG_OBJS_DIR = !!LANG_OBJS_DIR!!
|
||||
|
||||
ifeq ($(LANG_SUPPRESS), yes)
|
||||
LANG_ERRORS = >/dev/null 2>&1
|
||||
endif
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
RES := $(shell mkdir -p $(BIN_DIR)/lang )
|
||||
|
||||
all: table/strings.h $(LANGS)
|
||||
|
||||
strgen_base.o: $(SRC_DIR)/strgen/strgen_base.cpp $(SRC_DIR)/strgen/strgen.h $(SRC_DIR)/table/control_codes.h $(SRC_DIR)/table/strgen_tables.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
strgen.o: $(SRC_DIR)/strgen/strgen.cpp $(SRC_DIR)/strgen/strgen.h $(SRC_DIR)/table/control_codes.h $(SRC_DIR)/table/strgen_tables.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
string.o: $(SRC_DIR)/string.cpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
alloc_func.o: $(SRC_DIR)/core/alloc_func.cpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
getoptdata.o: $(SRC_DIR)/misc/getoptdata.cpp $(SRC_DIR)/misc/getoptdata.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/misc/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSTRGEN -c -o $@ $<
|
||||
|
||||
lang/english.txt: $(LANG_DIR)/english.txt
|
||||
$(Q)mkdir -p lang
|
||||
$(Q)cp $(LANG_DIR)/english.txt lang/english.txt
|
||||
|
||||
$(STRGEN): alloc_func.o string.o strgen_base.o strgen.o getoptdata.o
|
||||
$(E) '$(STAGE) Compiling and Linking $@'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) $(LDFLAGS_BUILD) $^ -o $@
|
||||
|
||||
table/strings.h: lang/english.txt $(STRGEN)
|
||||
$(E) '$(STAGE) Generating $@'
|
||||
@mkdir -p table
|
||||
$(Q)./$(STRGEN) -s $(LANG_DIR) -d table
|
||||
|
||||
$(LANGS): %.lng: $(LANG_DIR)/%.txt $(STRGEN) lang/english.txt
|
||||
$(E) '$(STAGE) Compiling language $(*F)'
|
||||
$(Q)./$(STRGEN) $(STRGEN_FLAGS) -s $(LANG_DIR) -d $(LANG_OBJS_DIR) $< $(LANG_ERRORS) && cp $@ $(BIN_DIR)/lang || true # Do not fail all languages when one fails
|
||||
|
||||
depend:
|
||||
|
||||
clean:
|
||||
$(E) '$(STAGE) Cleaning up language files'
|
||||
$(Q)rm -f strgen_base.o strgen.o string.o alloc_func.o getoptdata.o table/strings.h $(STRGEN) $(LANGS) $(LANGS:%=$(BIN_DIR)/lang/%) lang/english.*
|
||||
|
||||
mrproper: clean
|
||||
$(Q)rm -rf $(BIN_DIR)/lang
|
||||
|
||||
%.lng:
|
||||
@echo '$(STAGE) No such language: $(@:%.lng=%)'
|
||||
|
||||
.PHONY: all mrproper depend clean
|
45
Makefile.msvc
Normal file
@@ -0,0 +1,45 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#
|
||||
# Makefile for creating bundles of MSVC's binaries in the same way as we make
|
||||
# the zip bundles for ALL other OSes.
|
||||
#
|
||||
# Usage: make -f Makefile.msvc PLATFORM=[Win32|x64] BUNDLE_NAME=openttd-<version>-win[32|64]
|
||||
# or make -f Makefile.msvc PLATFORM=[Win32|x64] BUNDLE_NAME=OTTD-win[32|64]-nightly-<revision>
|
||||
#
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
else
|
||||
Q = @
|
||||
endif
|
||||
|
||||
AWK = "awk"
|
||||
ROOT_DIR := $(shell pwd)
|
||||
BIN_DIR = "$(ROOT_DIR)/bin"
|
||||
SRC_DIR = "$(ROOT_DIR)/src"
|
||||
BUNDLE_DIR = "$(ROOT_DIR)/bundle"
|
||||
BUNDLES_DIR = "$(ROOT_DIR)/bundles"
|
||||
TTD = openttd.exe
|
||||
PDB = openttd.pdb
|
||||
MODE = Release
|
||||
TARGET := $(shell echo $(PLATFORM) | sed "s@win64@x64@;s@win32@Win32@")
|
||||
|
||||
all:
|
||||
$(Q)cp objs/$(TARGET)/$(MODE)/$(TTD) $(BIN_DIR)/$(TTD)
|
||||
|
||||
include Makefile.bundle.in
|
||||
|
||||
bundle_pdb:
|
||||
@echo '[BUNDLE] Creating $(BUNDLE_NAME).pdb.xz'
|
||||
$(Q)mkdir -p "$(BUNDLES_DIR)"
|
||||
$(Q)cp objs/$(TARGET)/Release/$(PDB) $(BUNDLES_DIR)/$(BUNDLE_NAME).pdb
|
||||
$(Q)xz -9 $(BUNDLES_DIR)/$(BUNDLE_NAME).pdb
|
||||
|
||||
regression: all
|
||||
$(Q)cp bin/$(TTD) bin/openttd
|
||||
$(Q)cd bin && sh ai/regression/run.sh
|
63
Makefile.setting.in
Normal file
@@ -0,0 +1,63 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
SETTINGSGEN = !!SETTINGSGEN!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
CXX_BUILD = !!CXX_BUILD!!
|
||||
CFLAGS_BUILD = !!CFLAGS_BUILD!!
|
||||
CXXFLAGS_BUILD = !!CXXFLAGS_BUILD!!
|
||||
LDFLAGS_BUILD = !!LDFLAGS_BUILD!!
|
||||
STAGE = !!STAGE!!
|
||||
SETTING_OBJS_DIR = !!SETTING_OBJS_DIR!!
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
all: table/settings.h
|
||||
|
||||
settingsgen.o: $(SRC_DIR)/settingsgen/settingsgen.cpp $(SRC_DIR)/string_func.h $(SRC_DIR)/strings_type.h $(SRC_DIR)/misc/getoptdata.h $(SRC_DIR)/ini_type.h $(SRC_DIR)/core/smallvec_type.hpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<
|
||||
|
||||
alloc_func.o: $(SRC_DIR)/core/alloc_func.cpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<
|
||||
|
||||
getoptdata.o: $(SRC_DIR)/misc/getoptdata.cpp $(SRC_DIR)/misc/getoptdata.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/misc/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<
|
||||
|
||||
string.o: $(SRC_DIR)/string.cpp $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<
|
||||
|
||||
ini_load.o: $(SRC_DIR)/ini_load.cpp $(SRC_DIR)/core/alloc_func.hpp $(SRC_DIR)/core/mem_func.hpp $(SRC_DIR)/ini_type.h $(SRC_DIR)/string_func.h $(SRC_DIR)/safeguards.h
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) -DSETTINGSGEN -c -o $@ $<
|
||||
|
||||
$(SETTINGSGEN): alloc_func.o string.o ini_load.o settingsgen.o getoptdata.o
|
||||
$(E) '$(STAGE) Compiling and Linking $@'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) $(LDFLAGS_BUILD) $^ -o $@
|
||||
|
||||
table/settings.h: $(SETTINGSGEN) $(SRC_DIR)/table/settings.h.preamble $(SRC_DIR)/table/settings.h.postamble $(SRC_DIR)/table/*.ini
|
||||
$(E) '$(STAGE) Generating $@'
|
||||
@mkdir -p table
|
||||
$(Q)./$(SETTINGSGEN) -o table/settings.h -b $(SRC_DIR)/table/settings.h.preamble -a $(SRC_DIR)/table/settings.h.postamble $(SRC_DIR)/table/*.ini
|
||||
|
||||
depend:
|
||||
|
||||
clean:
|
||||
$(E) '$(STAGE) Cleaning up settings files'
|
||||
$(Q)rm -f settingsgen.o alloc_func.o getoptdata.o string.o ini_load.o $(SETTINGSGEN) table/settings.h
|
||||
|
||||
mrproper: clean
|
||||
|
||||
.PHONY: all mrproper depend clean
|
295
Makefile.src.in
Normal file
@@ -0,0 +1,295 @@
|
||||
# This file is part of OpenTTD.
|
||||
# OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
# OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
CC_HOST = !!CC_HOST!!
|
||||
CXX_HOST = !!CXX_HOST!!
|
||||
CC_BUILD = !!CC_BUILD!!
|
||||
CXX_BUILD = !!CXX_BUILD!!
|
||||
WINDRES = !!WINDRES!!
|
||||
STRIP = !!STRIP!!
|
||||
CFLAGS = !!CFLAGS!!
|
||||
CFLAGS_BUILD = !!CFLAGS_BUILD!!
|
||||
CXXFLAGS = !!CXXFLAGS!!
|
||||
CXXFLAGS_BUILD = !!CXXFLAGS_BUILD!!
|
||||
LIBS = !!LIBS!!
|
||||
LDFLAGS = !!LDFLAGS!!
|
||||
LDFLAGS_BUILD = !!LDFLAGS_BUILD!!
|
||||
ROOT_DIR = !!ROOT_DIR!!
|
||||
BIN_DIR = !!BIN_DIR!!
|
||||
LANG_DIR = !!LANG_DIR!!
|
||||
SRC_OBJS_DIR = !!SRC_OBJS_DIR!!
|
||||
LANG_OBJS_DIR = !!LANG_OBJS_DIR!!
|
||||
SETTING_OBJS_DIR= !!SETTING_OBJS_DIR!!
|
||||
SRC_DIR = !!SRC_DIR!!
|
||||
SCRIPT_SRC_DIR = !!SCRIPT_SRC_DIR!!
|
||||
MEDIA_DIR = !!MEDIA_DIR!!
|
||||
TTD = !!TTD!!
|
||||
STRGEN = !!STRGEN!!
|
||||
DEPEND = !!DEPEND!!
|
||||
OS = !!OS!!
|
||||
STAGE = !!STAGE!!
|
||||
MAKEDEPEND = !!MAKEDEPEND!!
|
||||
CFLAGS_MAKEDEP = !!CFLAGS_MAKEDEP!!
|
||||
SORT = !!SORT!!
|
||||
AWK = !!AWK!!
|
||||
CONFIG_CACHE_COMPILER = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_COMPILER!!
|
||||
CONFIG_CACHE_LINKER = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_LINKER!!
|
||||
CONFIG_CACHE_SOURCE = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_SOURCE!!
|
||||
CONFIG_CACHE_VERSION = $(SRC_OBJS_DIR)/!!CONFIG_CACHE_VERSION!!
|
||||
|
||||
OBJS_C := !!OBJS_C!!
|
||||
OBJS_CPP := !!OBJS_CPP!!
|
||||
OBJS_MM := !!OBJS_MM!!
|
||||
OBJS_RC := !!OBJS_RC!!
|
||||
OBJS := $(OBJS_C) $(OBJS_CPP) $(OBJS_MM) $(OBJS_RC)
|
||||
SRCS := !!SRCS!!
|
||||
|
||||
# All C-files depend on the compiler-flags cache file
|
||||
FILE_DEP := $(CONFIG_CACHE_COMPILER)
|
||||
# Create all dirs and subdirs
|
||||
RES := $(shell mkdir -p $(BIN_DIR) $(sort $(dir $(OBJS))))
|
||||
|
||||
CFLAGS += -I $(SRC_OBJS_DIR) -I $(LANG_OBJS_DIR) -I $(SETTING_OBJS_DIR)
|
||||
CFLAGS_MAKEDEP += -I $(SRC_OBJS_DIR) -I $(LANG_OBJS_DIR) -I $(SETTING_OBJS_DIR)
|
||||
ifdef SCRIPT_SRC_DIR
|
||||
CFLAGS_MAKEDEP += -I $(SCRIPT_SRC_DIR)
|
||||
endif
|
||||
|
||||
# Check if we want to show what we are doing
|
||||
ifdef VERBOSE
|
||||
Q =
|
||||
E = @true
|
||||
else
|
||||
Q = @
|
||||
E = @echo
|
||||
endif
|
||||
|
||||
# Our default target
|
||||
all: $(BIN_DIR)/$(TTD)
|
||||
|
||||
# These are 2 rules that point back to STRGEN stuff.
|
||||
# There is not really a need to have them here, but in case
|
||||
# some weirdo wants to run 'make' in the 'src' dir and expects
|
||||
# the languages to be recompiled, this catches that case and
|
||||
# takes care of it nicely.
|
||||
$(LANG_OBJS_DIR)/$(STRGEN):
|
||||
$(MAKE) -C $(LANG_OBJS_DIR) $(STRGEN)
|
||||
|
||||
$(LANG_OBJS_DIR)/table/strings.h: $(LANG_DIR)/english.txt $(LANG_OBJS_DIR)/$(STRGEN)
|
||||
$(MAKE) -C $(LANG_OBJS_DIR) table/strings.h
|
||||
|
||||
# Always run version detection, so we always have an accurate modified
|
||||
# flag
|
||||
VERSIONS := $(shell AWK="$(AWK)" "$(ROOT_DIR)/findversion.sh")
|
||||
MODIFIED := $(shell echo "$(VERSIONS)" | cut -f 3 -d' ')
|
||||
|
||||
# Use autodetected revisions
|
||||
VERSION := $(shell echo "$(VERSIONS)" | cut -f 1 -d' ')
|
||||
ISODATE := $(shell echo "$(VERSIONS)" | cut -f 2 -d' ')
|
||||
GITHASH := $(shell echo "$(VERSIONS)" | cut -f 4 -d' ')
|
||||
ISTAG := $(shell echo "$(VERSIONS)" | cut -f 5 -d' ')
|
||||
ISSTABLETAG := $(shell echo "$(VERSIONS)" | cut -f 6 -d' ')
|
||||
YEAR := $(shell echo "$(VERSIONS)" | cut -f 7 -d' ')
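The cut calls above assume findversion.sh prints a single record; after make's $(shell) has collapsed the whitespace, that record looks something like this (values made up):

    1.10.0-beta1 20200106 0 5cbdcbb7c 1 0 2020

i.e. VERSION, ISODATE, MODIFIED, GITHASH, ISTAG, ISSTABLETAG and YEAR, in that order.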
|
||||
|
||||
# Make sure we have something in VERSION and ISODATE
|
||||
ifeq ($(VERSION),)
|
||||
VERSION := norev000
|
||||
endif
|
||||
ifeq ($(ISODATE),)
|
||||
ISODATE := 00000000
|
||||
endif
|
||||
|
||||
# This helps to recompile if flags change
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_COMPILER) 2>/dev/null`" != "$(CFLAGS) $(CXXFLAGS)" ]; then echo "$(CFLAGS) $(CXXFLAGS)" > $(CONFIG_CACHE_COMPILER); fi )
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_LINKER) 2>/dev/null`" != "$(LDFLAGS) $(LIBS)" ]; then echo "$(LDFLAGS) $(LIBS)" > $(CONFIG_CACHE_LINKER); fi )
|
||||
|
||||
# If there is a change in the source-file-list, make sure we recheck the deps
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_SOURCE) 2>/dev/null`" != "$(SRCS)" ]; then echo "$(SRCS)" > $(CONFIG_CACHE_SOURCE); fi )
|
||||
# If there is a change in the revision, make sure we recompile rev.cpp
|
||||
RES := $(shell if [ "`cat $(CONFIG_CACHE_VERSION) 2>/dev/null`" != "$(VERSION) $(MODIFIED)" ]; then echo "$(VERSION) $(MODIFIED)" > $(CONFIG_CACHE_VERSION); fi )
|
||||
|
||||
ifndef MAKEDEPEND
|
||||
# The slow, but always correct, dep-check
|
||||
DEP_MASK := %.d
|
||||
DEPS := $(OBJS:%.o=%.d)
|
||||
|
||||
# Only include the deps if we are compiling everything
|
||||
ifeq ($(filter %.o clean mrproper, $(MAKECMDGOALS)),)
|
||||
-include $(DEPS)
|
||||
else
|
||||
# In case we want to compile a single target, include the .d file for it
|
||||
ifneq ($(filter %.o, $(MAKECMDGOALS)),)
|
||||
SINGLE_DEP := $(filter %.o, $(MAKECMDGOALS))
|
||||
-include $(SINGLE_DEP:%.o=%.d)
|
||||
endif
|
||||
endif
|
||||
|
||||
# Find the deps via GCC. Rarely wrong, but a bit slow
|
||||
|
||||
$(OBJS_C:%.o=%.d): %.d: $(SRC_DIR)/%.c $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.c=%.c)'
|
||||
$(Q)$(CC_HOST) $(CFLAGS) -MM $< | sed 's@^$(@F:%.d=%.o):@$@ $(@:%.d=%.o):@' > $@
|
||||
|
||||
$(OBJS_CPP:%.o=%.d): %.d: $(SRC_DIR)/%.cpp $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -MM $< | sed 's@^$(@F:%.d=%.o):@$@ $(@:%.d=%.o):@' > $@
|
||||
|
||||
$(OBJS_MM:%.o=%.d): %.d: $(SRC_DIR)/%.mm $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.mm=%.mm)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -MM $< | sed 's@^$(@F:%.d=%.o):@$@ $(@:%.d=%.o):@' > $@
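The sed in these three dependency rules rewrites the target named in the compiler's -MM output so that the .d file itself is regenerated whenever one of the listed headers changes. A hedged illustration for a hypothetical target music/midifile.d (file names illustrative, absolute paths shortened):

    # g++ -MM prints:
    midifile.o: $(SRC_DIR)/music/midifile.cpp $(SRC_DIR)/music/midifile.h
    # after the sed, as written to music/midifile.d:
    music/midifile.d music/midifile.o: $(SRC_DIR)/music/midifile.cpp $(SRC_DIR)/music/midifile.h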
|
||||
|
||||
$(OBJS_RC:%.o=%.d): %.d: $(SRC_DIR)/%.rc $(FILE_DEP)
|
||||
$(E) '$(STAGE) DEP $(<:$(SRC_DIR)/%.rc=%.rc)'
|
||||
$(Q)touch $@
|
||||
|
||||
else
|
||||
# The much faster, but possibly incorrect, dep-check
|
||||
DEP_MASK :=
|
||||
DEPS := Makefile.dep
|
||||
|
||||
# Only include the deps if we are not cleaning
|
||||
ifeq ($(filter depend clean mrproper, $(MAKECMDGOALS)),)
|
||||
-include Makefile.dep
|
||||
endif
|
||||
|
||||
ifeq ("$(SRC_OBJS_DIR)/$(DEPEND)","$(MAKEDEPEND)")
|
||||
DEP := $(MAKEDEPEND)
|
||||
$(SRC_OBJS_DIR)/$(DEPEND): $(SRC_DIR)/depend/depend.cpp
|
||||
$(E) '$(STAGE) Compiling and linking $(DEPEND)'
|
||||
$(Q)$(CXX_BUILD) $(CFLAGS_BUILD) $(CXXFLAGS_BUILD) $(LDFLAGS_BUILD) -o $@ $<
|
||||
endif
|
||||
|
||||
# Macro for invoking a command on groups of 100 words at a time
|
||||
# (analogous to xargs(1)). The macro invokes itself recursively
|
||||
# until the list of words is depleted.
|
||||
#
|
||||
# Usage: $(call xargs,COMMAND,LIST)
|
||||
#
|
||||
# COMMAND should be a shell command to which the words will be
|
||||
# appended as arguments in groups of 100.
|
||||
define xargs
|
||||
$(1) $(wordlist 1,100,$(2))
|
||||
$(if $(word 101,$(2)),$(call xargs,$(1),$(wordlist 101,$(words $(2)),$(2))))
|
||||
endef
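Because xargs expands to one line per group of 100 words, using it inside a recipe yields one shell command per group, much like piping a long list through xargs(1). A minimal stand-alone sketch (target and word list made up; recipe lines need a leading tab):

    WORDS := $(foreach i,$(shell seq 1 250),file$(i).c)

    show-words:
    	$(call xargs,@echo,$(WORDS))
    	# expands to three @echo lines of at most 100 words each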
|
||||
|
||||
# Make sure that only 'make depend' ALWAYS triggers a recheck
|
||||
ifeq ($(filter depend, $(MAKECMDGOALS)),)
|
||||
Makefile.dep: $(FILE_DEP) $(SRCS:%=$(SRC_DIR)/%) $(CONFIG_CACHE_SOURCE) $(DEP)
|
||||
else
|
||||
Makefile.dep: $(FILE_DEP) $(SRCS:%=$(SRC_DIR)/%) $(DEP) FORCE
|
||||
endif
|
||||
$(E) '$(STAGE) DEP CHECK (all files)'
|
||||
$(Q)rm -f Makefile.dep.tmp
|
||||
$(Q)touch Makefile.dep.tmp
|
||||
|
||||
# Calculate the deps via makedepend
|
||||
$(call xargs,$(Q)$(MAKEDEPEND) -f$(SRC_OBJS_DIR)/Makefile.dep.tmp -o.o -Y -v -a -- $(CFLAGS_MAKEDEP) -- 2>/dev/null,$(SRCS:%=$(SRC_DIR)/%))
|
||||
|
||||
# Remove all comments and includes that don't start with $(SRC_DIR)
|
||||
# Remove $(SRC_DIR) from object-file-name
|
||||
@$(AWK) ' \
|
||||
/^# DO NOT/ { print $$0 ; next} \
|
||||
/^#/ {next} \
|
||||
/: / { \
|
||||
left = NF - 1; \
|
||||
for (n = 2; n <= NF; n++) { \
|
||||
if (match($$n, "^$(ROOT_DIR)") == 0) { \
|
||||
$$n = ""; \
|
||||
left--; \
|
||||
} \
|
||||
} \
|
||||
gsub("$(SRC_DIR)/", "", $$1); \
|
||||
if (left > 0) { \
|
||||
print $$0; \
|
||||
$$1 = "Makefile.dep:"; \
|
||||
print $$0; \
|
||||
} \
|
||||
next \
|
||||
} \
|
||||
{ \
|
||||
print $$0 \
|
||||
} \
|
||||
' < Makefile.dep.tmp | sed 's@ *@ @g;s@ $$@@' | LC_ALL=C $(SORT) > Makefile.dep
|
||||
|
||||
$(Q)rm -f Makefile.dep.tmp Makefile.dep.tmp.bak
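Net effect of the awk filter above: prerequisites outside the source tree are dropped, the $(SRC_DIR) prefix is stripped from each object target, and every remaining prerequisite is also attached to Makefile.dep itself so the dependency file is regenerated when a source changes. A hedged example with made-up paths, assuming ROOT_DIR=/home/alice/openttd:

    # line emitted by makedepend:
    /home/alice/openttd/src/video/sdl_v.o: /home/alice/openttd/src/video/sdl_v.h /usr/include/SDL/SDL.h
    # lines that end up in Makefile.dep:
    Makefile.dep: /home/alice/openttd/src/video/sdl_v.h
    video/sdl_v.o: /home/alice/openttd/src/video/sdl_v.h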
|
||||
|
||||
endif
|
||||
|
||||
# Avoid problems with deps if a .h/.hpp/.hpp.sq file is deleted without the deps
|
||||
# being updated. Now the Makefile continues, the deps are recreated
|
||||
# and all will be fine.
|
||||
%.h %.hpp %.hpp.sq:
|
||||
@true
|
||||
|
||||
|
||||
# Compile all the files according to the targets
|
||||
|
||||
$(OBJS_C): %.o: $(SRC_DIR)/%.c $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.c=%.c)'
|
||||
$(Q)$(CC_HOST) $(CFLAGS) -c -o $@ $<
|
||||
|
||||
$(filter-out %sse2.o, $(filter-out %ssse3.o, $(filter-out %sse4.o, $(OBJS_CPP)))): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -o $@ $<
|
||||
|
||||
$(filter %sse2.o, $(OBJS_CPP)): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -msse2 -o $@ $<
|
||||
|
||||
$(filter %ssse3.o, $(OBJS_CPP)): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -mssse3 -o $@ $<
|
||||
|
||||
$(filter %sse4.o, $(OBJS_CPP)): %.o: $(SRC_DIR)/%.cpp $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.cpp=%.cpp)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -msse4.1 -o $@ $<
|
||||
|
||||
$(OBJS_MM): %.o: $(SRC_DIR)/%.mm $(DEP_MASK) $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling $(<:$(SRC_DIR)/%.mm=%.mm)'
|
||||
$(Q)$(CXX_HOST) $(CFLAGS) $(CXXFLAGS) -c -o $@ $<
|
||||
|
||||
$(OBJS_RC): %.o: $(SRC_DIR)/%.rc $(FILE_DEP)
|
||||
$(E) '$(STAGE) Compiling resource $(<:$(SRC_DIR)/%.rc=%.rc)'
|
||||
$(Q)$(WINDRES) -o $@ $<
|
||||
|
||||
$(BIN_DIR)/$(TTD): $(TTD)
|
||||
$(Q)cp $(TTD) $(BIN_DIR)/$(TTD)
|
||||
ifeq ($(OS), UNIX)
|
||||
$(Q)cp $(MEDIA_DIR)/openttd.32.bmp $(BIN_DIR)/baseset/
|
||||
endif
|
||||
ifeq ($(OS), OSX)
|
||||
$(Q)cp $(ROOT_DIR)/os/macosx/splash.png $(BIN_DIR)/baseset/
|
||||
endif
|
||||
|
||||
$(TTD): $(OBJS) $(CONFIG_CACHE_LINKER)
|
||||
$(E) '$(STAGE) Linking $@'
|
||||
$(Q)+$(CXX_HOST) $(LDFLAGS) $(OBJS) $(LIBS) -o $@
|
||||
ifdef STRIP
|
||||
$(Q)$(STRIP) $@
|
||||
endif
|
||||
|
||||
# Revision files
|
||||
|
||||
$(SRC_DIR)/rev.cpp: $(CONFIG_CACHE_VERSION) $(SRC_DIR)/rev.cpp.in
|
||||
$(Q)cat $(SRC_DIR)/rev.cpp.in | sed "s@\!\!ISODATE\!\!@$(ISODATE)@g;s@!!VERSION!!@$(VERSION)@g;s@!!MODIFIED!!@$(MODIFIED)@g;s@!!DATE!!@`date +%d.%m.%y`@g;s@!!GITHASH!!@$(GITHASH)@g;s@!!ISTAG!!@$(ISTAG)@g;s@!!ISSTABLETAG!!@$(ISSTABLETAG)@g;s@!!YEAR!!@$(YEAR)@g" > $(SRC_DIR)/rev.cpp
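rev.cpp.in and ottdres.rc.in are templates with !!PLACEHOLDER!! markers that these sed calls fill in; a purely hypothetical line to show the shape of the substitution (the real template content is not reproduced here):

    # template:  const char _openttd_revision[] = "!!VERSION!!";
    # generated: const char _openttd_revision[] = "1.10.0";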
|
||||
|
||||
$(SRC_DIR)/os/windows/ottdres.rc: $(CONFIG_CACHE_VERSION) $(SRC_DIR)/os/windows/ottdres.rc.in
|
||||
$(Q)cat $(SRC_DIR)/os/windows/ottdres.rc.in | sed "s@\!\!ISODATE\!\!@$(ISODATE)@g;s@!!VERSION!!@$(VERSION)@g;s@!!DATE!!@`date +%d.%m.%y`@g;s@!!GITHASH!!@$(GITHASH)@g;s@!!ISTAG!!@$(ISTAG)@g;s@!!ISSTABLETAG!!@$(ISSTABLETAG)@g;s@!!YEAR!!@$(YEAR)@g" > $(SRC_DIR)/os/windows/ottdres.rc
|
||||
|
||||
FORCE:
|
||||
|
||||
depend: $(DEPS)
|
||||
|
||||
clean:
|
||||
$(E) '$(STAGE) Cleaning up object files'
|
||||
$(Q)rm -f $(DEPS) $(OBJS) $(TTD) $(DEPEND) $(TTD:%=$(BIN_DIR)/%) $(BIN_DIR)/baseset/openttd.32.bmp $(CONFIG_CACHE_COMPILER) $(CONFIG_CACHE_LINKER) $(CONFIG_CACHE_SOURCE)
|
||||
|
||||
mrproper: clean
|
||||
$(Q)rm -f $(SRC_DIR)/rev.cpp $(SRC_DIR)/os/windows/ottdres.rc
|
||||
|
||||
%.o:
|
||||
@echo '$(STAGE) No such source-file: $(@:%.o=%).[c|cpp|mm|rc]'
|
||||
|
||||
.PHONY: all mrproper depend clean FORCE
|
39
README.md
@@ -34,7 +34,7 @@ Both 'stable' and 'nightly' versions are available for download:
|
||||
- most people should choose the 'stable' version, as this has been more extensively tested
|
||||
- the 'nightly' version includes the latest changes and features, but may sometimes be less reliable
|
||||
|
||||
OpenTTD is also available for free on [Steam](https://store.steampowered.com/app/1536610/OpenTTD/), [GOG.com](https://www.gog.com/game/openttd), and the [Microsoft Store](https://www.microsoft.com/p/openttd-official/9ncjg5rvrr1c). On some platforms OpenTTD will be available via your OS package manager or a similar service.
|
||||
On some platforms OpenTTD will also be available via your OS package manager or a similar service.
|
||||
|
||||
|
||||
## 1.2) OpenTTD gameplay manual
|
||||
@@ -46,13 +46,15 @@ OpenTTD has a [community-maintained wiki](https://wiki.openttd.org/), including
|
||||
|
||||
OpenTTD has been ported to several platforms and operating systems.
|
||||
|
||||
The currently supported platforms are:
|
||||
The currently working platforms are:
|
||||
|
||||
- Linux (SDL (OpenGL and non-OpenGL))
|
||||
- macOS (universal) (Cocoa)
|
||||
- Windows (Win32 GDI / OpenGL)
|
||||
|
||||
Other platforms may also work (in particular various BSD systems), but we don't actively test or maintain these.
|
||||
- FreeBSD (SDL)
|
||||
- Haiku (SDL)
|
||||
- Linux (SDL)
|
||||
- macOS (universal) (Cocoa video and sound drivers)
|
||||
- OpenBSD (SDL)
|
||||
- OS/2 (SDL)
|
||||
- Windows (Win32 GDI (faster) or SDL)
|
||||
|
||||
### 1.3.1) Legacy support
|
||||
Platforms, languages and compilers change.
|
||||
@@ -63,13 +65,13 @@ Please report a bug if you find a save that doesn't load.
|
||||
|
||||
## 1.4) Installing and running OpenTTD
|
||||
|
||||
OpenTTD is usually straightforward to install, but for more help the wiki [includes an installation guide](https://wiki.openttd.org/en/Manual/Installation).
|
||||
OpenTTD is usually straightforward to install, but for more help the wiki [includes an installation guide](https://wiki.openttd.org/Installation).
|
||||
|
||||
OpenTTD needs some additional graphics and sound files to run.
|
||||
|
||||
For some platforms these will be downloaded during the installation process if required.
|
||||
|
||||
For some platforms, you will need to refer to [the installation guide](https://wiki.openttd.org/en/Manual/Installation).
|
||||
For some platforms, you will need to refer to [the installation guide](https://wiki.openttd.org/Installation).
|
||||
|
||||
|
||||
### 1.4.1) Free graphics and sound files
|
||||
@@ -77,9 +79,9 @@ For some platforms, you will need to refer to [the installation guide](https://w
|
||||
The free data files, split into OpenGFX for graphics, OpenSFX for sounds and
|
||||
OpenMSX for music can be found at:
|
||||
|
||||
- https://www.openttd.org/downloads/opengfx-releases/ for OpenGFX
|
||||
- https://www.openttd.org/downloads/opensfx-releases/ for OpenSFX
|
||||
- https://www.openttd.org/downloads/openmsx-releases/ for OpenMSX
|
||||
- https://www.openttd.org/download-opengfx for OpenGFX
|
||||
- https://www.openttd.org/download-opensfx for OpenSFX
|
||||
- https://www.openttd.org/download-openmsx for OpenMSX
|
||||
|
||||
Please follow the readme of these packages about the installation procedure.
|
||||
The Windows installer can optionally download and install these packages.
|
||||
@@ -114,7 +116,7 @@ OpenTTD features multiple types of add-on content, which modify gameplay in diff
|
||||
|
||||
Most types of add-on content can be downloaded within OpenTTD via the 'Check Online Content' button in the main menu.
|
||||
|
||||
Add-on content can also be installed manually, but that's more complicated; the [OpenTTD wiki](https://wiki.openttd.org/) may offer help with that, or the [OpenTTD directory structure guide](./docs/directory_structure.md).
|
||||
Add-on content can also be installed manually, but that's more complicated; the [OpenTTD wiki](https://wiki.openttd.org/OpenTTD) may offer help with that, or the [OpenTTD directory structure guide](./docs/directory_structure.md).
|
||||
|
||||
### 1.5.1) AI opponents
|
||||
|
||||
@@ -136,7 +138,7 @@ A wide range of add-content is available as NewGRFs, including vehicles, industr
|
||||
|
||||
NewGRFs can be added via the 'Check Online Content' button in the main menu.
|
||||
|
||||
See also the wiki [guide to NewGRFs](https://wiki.openttd.org/en/Manual/NewGRF) and [the forum graphics development section](https://www.tt-forums.net/viewforum.php?f=66).
|
||||
See also the wiki [guide to NewGRFs](https://wiki.openttd.org/NewGRF) and [the forum graphics development section](https://www.tt-forums.net/viewforum.php?f=66).
|
||||
|
||||
### 1.5.4) Game scripts
|
||||
|
||||
@@ -144,7 +146,7 @@ Game scripts can provide additional challenges or changes to the standard OpenTT
|
||||
|
||||
Game scripts can be added via the 'Check Online Content' button in the main menu.
|
||||
|
||||
See also the wiki [guide to game scripts](https://wiki.openttd.org/en/Manual/Game%20script) and [the forum graphics game script section](https://www.tt-forums.net/viewforum.php?f=65).
|
||||
See also the wiki [guide to game scripts](https://wiki.openttd.org/Game_script) and [the forum graphics game script section](https://www.tt-forums.net/viewforum.php?f=65).
|
||||
|
||||
### 1.6) OpenTTD directories
|
||||
|
||||
@@ -162,14 +164,14 @@ If you want to compile OpenTTD from source, instructions can be found in [COMPIL
|
||||
'Official' channels
|
||||
|
||||
- [OpenTTD website](https://www.openttd.org)
|
||||
- IRC chat using #openttd on irc.oftc.net [more info about our irc channel](https://wiki.openttd.org/en/Development/IRC%20channel)
|
||||
- IRC chat using #openttd on irc.oftc.net [more info about our irc channel](https://wiki.openttd.org/Irc)
|
||||
- [OpenTTD on Github](https://github.com/openTTD/) for code repositories and for reporting issues
|
||||
- [forum.openttd.org](https://forum.openttd.org/) - the primary community forum site for discussing OpenTTD and related games
|
||||
- [OpenTTD wiki](https://wiki.openttd.org/) community-maintained wiki, including topics like gameplay guide, detailed explanation of some game mechanics, how to use add-on content (mods) and much more
|
||||
|
||||
'Unofficial' channels
|
||||
|
||||
- the OpenTTD wiki has a [page listing OpenTTD communities](https://wiki.openttd.org/en/Community/Community) including some in languages other than English
|
||||
- the OpenTTD wiki has a [page listing OpenTTD communities](https://wiki.openttd.org/Community) including some in languages other than English
|
||||
|
||||
|
||||
### 2.1) Contributing to OpenTTD
|
||||
@@ -205,9 +207,6 @@ See the comments in the source files in `src/3rdparty/md5` for the complete lice
|
||||
The implementations of Posix `getaddrinfo` and `getnameinfo` for OS/2 in `src/3rdparty/os2` are distributed partly under the GNU Lesser General Public License 2.1, and partly under the (3-clause) BSD license.
|
||||
The exact licensing terms can be found in `src/3rdparty/os2/getaddrinfo.c` resp. `src/3rdparty/os2/getnameinfo.c`.
|
||||
|
||||
The fmt implementation in `src/3rdparty/fmt` is licensed under the MIT license.
|
||||
See `src/3rdparty/fmt/LICENSE.rst` for the complete license text.
|
||||
|
||||
|
||||
## 4.0 Credits
|
||||
|
||||
|
72
azure-pipelines-ci.yml
Normal file
@@ -0,0 +1,72 @@
|
||||
trigger:
|
||||
- master
|
||||
- release/*
|
||||
pr:
|
||||
- master
|
||||
- release/*
|
||||
|
||||
jobs:
|
||||
- job: windows
|
||||
displayName: 'Windows'
|
||||
pool:
|
||||
vmImage: 'VS2017-Win2016'
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
Win32:
|
||||
BuildPlatform: 'Win32'
|
||||
Win64:
|
||||
BuildPlatform: 'x64'
|
||||
|
||||
steps:
|
||||
- template: azure-pipelines/templates/ci-git-rebase.yml
|
||||
- template: azure-pipelines/templates/windows-dependencies.yml
|
||||
- template: azure-pipelines/templates/ci-opengfx.yml
|
||||
- template: azure-pipelines/templates/windows-build.yml
|
||||
parameters:
|
||||
BuildPlatform: $(BuildPlatform)
|
||||
BuildConfiguration: Debug
|
||||
- script: |
|
||||
call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x86
|
||||
cd projects
|
||||
call regression.bat
|
||||
displayName: 'Test'
|
||||
|
||||
- job: linux
|
||||
displayName: 'Linux'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
linux-amd64-clang-3.9:
|
||||
Tag: 'linux-amd64-clang-3.9'
|
||||
linux-amd64-gcc-6:
|
||||
Tag: 'linux-amd64-gcc-6'
|
||||
linux-i386-gcc-6:
|
||||
Tag: 'linux-i386-gcc-6'
|
||||
|
||||
steps:
|
||||
- template: azure-pipelines/templates/ci-git-rebase.yml
|
||||
# The dockers already have the dependencies installed
|
||||
# The dockers already have OpenGFX installed
|
||||
- template: azure-pipelines/templates/linux-build.yml
|
||||
parameters:
|
||||
Image: compile-farm-ci
|
||||
Tag: $(Tag)
|
||||
|
||||
- job: macos
|
||||
displayName: 'MacOS'
|
||||
pool:
|
||||
vmImage: 'macOS-10.14'
|
||||
|
||||
variables:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.9
|
||||
|
||||
steps:
|
||||
- template: azure-pipelines/templates/ci-git-rebase.yml
|
||||
- template: azure-pipelines/templates/osx-dependencies.yml
|
||||
- template: azure-pipelines/templates/ci-opengfx.yml
|
||||
- template: azure-pipelines/templates/osx-build.yml
|
||||
- script: 'make regression'
|
||||
displayName: 'Test'
|
10
azure-pipelines-release-stable.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
trigger:
|
||||
branches:
|
||||
include:
|
||||
- refs/tags/*
|
||||
pr: none
|
||||
|
||||
jobs:
|
||||
- template: azure-pipelines/templates/release.yml
|
||||
parameters:
|
||||
IsStableRelease: true
|
7
azure-pipelines-release.yml
Normal file
@@ -0,0 +1,7 @@
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
jobs:
|
||||
- template: azure-pipelines/templates/release.yml
|
||||
parameters:
|
||||
IsStableRelease: false
|
@@ -4,9 +4,9 @@ tag=$(git name-rev --name-only --tags --no-undefined HEAD 2>/dev/null | sed 's@\
|
||||
|
||||
# If we are a tag, show the part of the changelog till (but excluding) the last stable
|
||||
if [ -n "$tag" ]; then
|
||||
grep='^[0-9]\+\.[0-9]\+[^-]'
|
||||
grep='^[0-9]\+\.[0-9]\+\.[0-9]\+[^-]'
|
||||
next=$(cat changelog.txt | grep '^[0-9]' | awk 'BEGIN { show="false" } // { if (show=="true") print $0; if ($1=="'$tag'") show="true"} ' | grep "$grep" | head -n1 | sed 's/ .*//')
|
||||
cat changelog.txt | awk 'BEGIN { show="false" } /^[0-9]+.[0-9]+/ { if ($1=="'$next'") show="false"; if ($1=="'$tag'") show="true";} // { if (show=="true") print $0 }'
|
||||
cat changelog.txt | awk 'BEGIN { show="false" } /^[0-9]+.[0-9]+.[0-9]+/ { if ($1=="'$next'") show="false"; if ($1=="'$tag'") show="true";} // { if (show=="true") print $0 }'
|
||||
exit 0
|
||||
fi
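The tightened grep pattern changes which changelog headings count as "the last stable release": it now requires three version components not followed by a dash, so beta and RC headings no longer terminate the excerpt early. A hedged illustration with made-up headings:

    1.10.0-RC1 (2020-03-01)   # matched the old ^[0-9]\+\.[0-9]\+[^-] pattern, not the new one
    1.9.3 (2019-09-16)        # matched by both: a genuine stable release heading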
|
||||
|
87
azure-pipelines/manifest.sh
Executable file
@@ -0,0 +1,87 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "Usage: $0 <folder-with-bundles>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
FOLDER=$1
|
||||
|
||||
if [ ! -e .version ] || [ ! -e .release_date ]; then
|
||||
echo "This script should be executed in the root of an extracted source tarball"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Find the name based on the version
|
||||
if [ -e .is_stable ]; then
|
||||
isTesting=$(cat .version | grep "RC\|beta" || true)
|
||||
if [ -z "${isTesting}" ]; then
|
||||
NAME="stable"
|
||||
else
|
||||
NAME="testing"
|
||||
fi
|
||||
else
|
||||
NAME=$(cat .version | cut -d- -f2 | cut -d- -f-2)
|
||||
fi
|
||||
|
||||
# Convert the date to a YAML date
|
||||
DATE=$(cat .release_date | tr ' ' T | sed 's/TUTC/:00-00:00/')
|
||||
VERSION=$(cat .version)
|
||||
BASE="openttd-${VERSION}"
|
||||
|
||||
echo "name: ${NAME}" > manifest.yaml
|
||||
echo "date: ${DATE}" >> manifest.yaml
|
||||
echo "base: ${BASE}-" >> manifest.yaml
|
||||
|
||||
error=""
|
||||
|
||||
FILES=
|
||||
DEV_FILES=
|
||||
for filename in $(ls ${FOLDER} | grep -v ".txt$\|.md$\|sum$" | sort); do
|
||||
case ${filename} in
|
||||
*docs* |\
|
||||
*source* |\
|
||||
*dbg.deb |\
|
||||
*pdb.xz )
|
||||
DEV_FILES="${DEV_FILES} ${filename}"
|
||||
;;
|
||||
|
||||
*)
|
||||
FILES="${FILES} ${filename}"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# output_files key filename...
|
||||
output_files() {
|
||||
if [ "$#" -lt 2 ]; then return; fi
|
||||
key=$1
|
||||
echo "${key}:" >> manifest.yaml
|
||||
shift
|
||||
while [ "$#" -gt 0 ]; do
|
||||
filename=$1
|
||||
if [ ! -e ${FOLDER}/${filename}.md5sum ] || [ ! -e ${FOLDER}/${filename}.sha1sum ] || [ ! -e ${FOLDER}/${filename}.sha256sum ]; then
|
||||
echo "ERROR: missing checksum file for ${filename}" 1>&2
|
||||
error="y"
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
echo "- id: ${filename}" >> manifest.yaml
|
||||
echo " size: $(stat -c"%s" ${FOLDER}/${filename})" >> manifest.yaml
|
||||
echo " md5sum: $(cat ${FOLDER}/${filename}.md5sum | cut -d\ -f1)" >> manifest.yaml
|
||||
echo " sha1sum: $(cat ${FOLDER}/${filename}.sha1sum | cut -d\ -f1)" >> manifest.yaml
|
||||
echo " sha256sum: $(cat ${FOLDER}/${filename}.sha256sum | cut -d\ -f1)" >> manifest.yaml
|
||||
shift
|
||||
done
|
||||
}
|
||||
|
||||
output_files files ${FILES}
|
||||
output_files dev_files ${DEV_FILES}
|
||||
|
||||
if [ -n "${error}" ]; then
|
||||
echo "ERROR: exiting due to earlier errors" 1>&2
|
||||
exit 1
|
||||
fi
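For orientation, a sketch of the manifest.yaml this script produces, with invented file names, sizes and digests (the real values depend on the bundles present):

    name: testing
    date: 2019-04-01T23:00:00-00:00
    base: openttd-1.9.0-RC1-
    files:
    - id: openttd-1.9.0-RC1-windows-win64.zip
      size: 12345678
      md5sum: <md5 hex digest>
      sha1sum: <sha1 hex digest>
      sha256sum: <sha256 hex digest>
    dev_files:
    - id: openttd-1.9.0-RC1-windows-win64.pdb.xz
      size: 2345678
      md5sum: <md5 hex digest>
      sha1sum: <sha1 hex digest>
      sha256sum: <sha256 hex digest>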
|
10
azure-pipelines/templates/ci-git-rebase.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
steps:
|
||||
# Rebase to target branch for every PR. This means users don't have to
|
||||
# rebase every time the target branch changes. As long as the PR applies cleanly, we
|
||||
# will validate it.
|
||||
- bash: |
|
||||
git config user.email 'info@openttd.org'
|
||||
git config user.name 'OpenTTD CI'
|
||||
git rebase origin/${SYSTEM_PULLREQUEST_TARGETBRANCH}
|
||||
displayName: 'Rebase to target branch'
|
||||
condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))
|
8
azure-pipelines/templates/ci-opengfx.yml
Normal file
@@ -0,0 +1,8 @@
|
||||
steps:
|
||||
- bash: |
|
||||
set -ex
|
||||
cd bin/baseset
|
||||
curl -L https://cdn.openttd.org/opengfx-releases/0.6.0/opengfx-0.6.0-all.zip > opengfx-all.zip
|
||||
unzip opengfx-all.zip
|
||||
rm -f opengfx-all.zip
|
||||
displayName: 'Install OpenGFX'
|
36
azure-pipelines/templates/linux-build.yml
Normal file
@@ -0,0 +1,36 @@
|
||||
parameters:
|
||||
Image: ''
|
||||
Tag: ''
|
||||
ContainerCommand: ''
|
||||
|
||||
steps:
|
||||
# 'envVars' in the 'Docker@1' task is a bit funky. When you want to use a
|
||||
# variable, you have to quote it. But the quote is also sent directly to
|
||||
# Docker and ends up in the variable, which you don't want. To work around
|
||||
# this, we set the correct variable first (which becomes an env-variable), and
|
||||
# pass that env-variable through to Docker. We cannot use the normal
|
||||
# 'variables' entry, as we are a template. So that results in this bit of
|
||||
# Bash code. Not because it is pretty, but it is the only way we found that
|
||||
# works.
|
||||
- bash: |
|
||||
echo "##vso[task.setvariable variable=TARGET_BRANCH]${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
||||
echo "Target branch is ${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
||||
displayName: "Set target branch"
|
||||
condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))
|
||||
|
||||
- task: Docker@1
|
||||
${{ if eq(parameters.Image, 'compile-farm') }}:
|
||||
displayName: 'Build'
|
||||
${{ if eq(parameters.Image, 'compile-farm-ci') }}:
|
||||
displayName: 'Build and test'
|
||||
# Run the commit-checker only if it is a Pull Request
|
||||
condition: and(succeeded(), or(not(contains(variables['Agent.JobName'], 'commit-checker')), eq(variables['Build.Reason'], 'PullRequest')))
|
||||
inputs:
|
||||
command: 'Run an image'
|
||||
imageName: openttd/${{ parameters.Image }}:${{ parameters.Tag }}
|
||||
volumes: '$(Build.SourcesDirectory):$(Build.SourcesDirectory)'
|
||||
workingDirectory: '$(Build.SourcesDirectory)'
|
||||
containerCommand: ${{ parameters.ContainerCommand }}
|
||||
runInBackground: false
|
||||
envVars: |
|
||||
TARGET_BRANCH
|
5
azure-pipelines/templates/linux-claim-bundles.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
steps:
|
||||
# Because we run the compile in a docker container (under root), we are not the owner
|
||||
# of the 'bundles' folder. Fix that by executing a chown on it.
|
||||
- bash: sudo chown -R $(id -u):$(id -g) bundles
|
||||
displayName: 'Claim bundles folder back'
|
5
azure-pipelines/templates/osx-build.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
steps:
|
||||
- script: './configure PKG_CONFIG_PATH=/usr/local/lib/pkgconfig --enable-static'
|
||||
displayName: 'Configure'
|
||||
- script: 'make -j2'
|
||||
displayName: 'Build'
|
12
azure-pipelines/templates/osx-dependencies.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
steps:
|
||||
- script: |
|
||||
set -ex
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 brew install pkg-config lzo xz libpng freetype
|
||||
# Remove the dynamic libraries of these libraries, to ensure we use
|
||||
# the static versions. That is important, as it is unlikely any
|
||||
# end-user has these brew libraries installed.
|
||||
rm /usr/local/Cellar/lzo/*/lib/*.dylib
|
||||
rm /usr/local/Cellar/xz/*/lib/*.dylib
|
||||
rm /usr/local/Cellar/libpng/*/lib/*.dylib
|
||||
rm /usr/local/Cellar/freetype/*/lib/*.dylib
|
||||
displayName: 'Install dependencies'
|
19
azure-pipelines/templates/release-bundles.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
parameters:
|
||||
CalculateChecksums: true
|
||||
|
||||
steps:
|
||||
- ${{ if eq(parameters.CalculateChecksums, true) }}:
|
||||
- bash: |
|
||||
set -ex
|
||||
cd bundles
|
||||
for i in $(ls); do
|
||||
openssl dgst -r -md5 -hex $i > $i.md5sum
|
||||
openssl dgst -r -sha1 -hex $i > $i.sha1sum
|
||||
openssl dgst -r -sha256 -hex $i > $i.sha256sum
|
||||
done
|
||||
displayName: 'Calculate checksums'
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish bundles'
|
||||
inputs:
|
||||
PathtoPublish: bundles/
|
||||
ArtifactName: bundles
|
20
azure-pipelines/templates/release-fetch-source.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Fetch the source tarball as prepared by an earlier job. The version is
# predefined in it. This ensures we are all going to compile the same
|
||||
# source with the same version.
|
||||
|
||||
steps:
|
||||
- checkout: none
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: 'Download source'
|
||||
inputs:
|
||||
downloadType: specific
|
||||
itemPattern: 'bundles/openttd-*-source.tar.xz'
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
- bash: tar --xz -xf ../a/bundles/openttd-*-source.tar.xz --strip-components=1
|
||||
displayName: 'Extracting source'
|
||||
- bash: |
|
||||
set -e
|
||||
VERSION=$(cat .version)
|
||||
echo "${VERSION}"
|
||||
echo "##vso[build.updatebuildnumber]${VERSION}"
|
||||
displayName: 'Change BuildNumber to version'
|
20
azure-pipelines/templates/release-manifest.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
parameters:
|
||||
IsStableRelease: false
|
||||
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: 'Download all bundles'
|
||||
inputs:
|
||||
downloadType: specific
|
||||
itemPattern: 'bundles/*'
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- script: |
|
||||
touch .is_stable
|
||||
displayName: 'Mark as stable release'
|
||||
- script: |
|
||||
set -ex
|
||||
./azure-pipelines/manifest.sh ../a/bundles/
|
||||
mkdir -p bundles
|
||||
mv manifest.yaml bundles/
|
||||
displayName: 'Create manifest.yaml'
|
35
azure-pipelines/templates/release-prepare-source.yml
Normal file
@@ -0,0 +1,35 @@
# Set the revisions, and remove the VCS files.
# This ensures everything else picks up on the predefined versions, and not
# that the version suddenly changes because of some build process.

steps:
- script: |
    set -ex

    if [ -n "${SYSTEM_PULLREQUEST_PULLREQUESTNUMBER}" ]; then
      # We are triggered from a GitHub Pull Request
      git checkout -B pr${SYSTEM_PULLREQUEST_PULLREQUESTNUMBER}
    elif [ "${BUILD_SOURCEBRANCHNAME}" = "merge" ] || [ "${BUILD_SOURCEBRANCHNAME}" = "head" ]; then
      # We are manually triggered based on a GitHub Pull Request
      PULLREQUESTNUMBER=$(echo ${BUILD_SOURCEBRANCH} | cut -d/ -f3)
      git checkout -B pr${PULLREQUESTNUMBER}
    else
      git checkout -B ${BUILD_SOURCEBRANCHNAME}
    fi

    ./findversion.sh > .ottdrev
    ./azure-pipelines/changelog.sh > .changelog
    TZ='UTC' date +"%Y-%m-%d %H:%M UTC" > .release_date
    cat .ottdrev | cut -f 1 -d$'\t' > .version
    echo "Release Date: $(cat .release_date)"
    echo "Revision: $(cat .ottdrev)"
    echo "Version: $(cat .version)"
  displayName: 'Create version files'
- script: |
    set -e
    VERSION=$(cat .version)
    echo "${VERSION}"
    echo "##vso[build.updatebuildnumber]${VERSION}"
  displayName: 'Change BuildNumber to version'
- script: find . -iname .hg -or -iname .git -or -iname .svn | xargs rm -rf
  displayName: 'Remove VCS information'
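The `.version` file is simply the first tab-separated field of findversion.sh's output. A small sketch of that extraction (the record below is made up; the real fields come from findversion.sh):

    # Hypothetical .ottdrev record: version<TAB>hash<TAB>modified<TAB>date.
    printf '1.10.2\t89c8215b79\t0\t20200601\n' > .ottdrev
    cut -f 1 .ottdrev    # TAB is cut's default delimiter; prints 1.10.2, the .version contents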
186
azure-pipelines/templates/release.yml
Normal file
@@ -0,0 +1,186 @@
|
||||
parameters:
  # If this is false, not all targets are triggered. For example, the NSIS
  # installer for Windows and the creation of debs only work for releases,
  # not for any other type of binary. They are skipped if this is set to
  # false.
  IsStableRelease: false
|
||||
|
||||
jobs:
|
||||
- job: source
|
||||
displayName: 'Source'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
|
||||
steps:
|
||||
- template: release-prepare-source.yml
|
||||
- script: |
|
||||
set -ex
|
||||
|
||||
# Rename the folder to openttd-NNN-source
|
||||
mkdir openttd-$(Build.BuildNumber)
|
||||
find . -maxdepth 1 -not -name . -not -name openttd-$(Build.BuildNumber) -exec mv {} openttd-$(Build.BuildNumber)/ \;
|
||||
# Copy back release_date, as it is needed for the template 'release-bundles'
|
||||
cp openttd-$(Build.BuildNumber)/.release_date .release_date
|
||||
|
||||
mkdir bundles
|
||||
tar --xz -cf bundles/openttd-$(Build.BuildNumber)-source.tar.xz openttd-$(Build.BuildNumber)
|
||||
zip -9 -r -q bundles/openttd-$(Build.BuildNumber)-source.zip openttd-$(Build.BuildNumber)
|
||||
displayName: 'Create bundle'
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: meta
|
||||
displayName: 'Metadata'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn: source
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- script: |
|
||||
set -ex
|
||||
|
||||
mkdir -p bundles
|
||||
cp .changelog bundles/changelog.txt
|
||||
cp .release_date bundles/released.txt
|
||||
cp README.md bundles/README.md
|
||||
displayName: 'Copy meta files'
|
||||
- template: release-bundles.yml
|
||||
parameters:
|
||||
CalculateChecksums: false
|
||||
|
||||
- job: docs
|
||||
displayName: 'Docs'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn: source
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: linux-build.yml
|
||||
parameters:
|
||||
Image: compile-farm
|
||||
ContainerCommand: '$(Build.BuildNumber)'
|
||||
Tag: docs
|
||||
- template: linux-claim-bundles.yml
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: windows
|
||||
displayName: 'Windows'
|
||||
pool:
|
||||
vmImage: 'VS2017-Win2016'
|
||||
dependsOn: source
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
Win32:
|
||||
BuildPlatform: 'Win32'
|
||||
BundlePlatform: 'win32'
|
||||
Win64:
|
||||
BuildPlatform: 'x64'
|
||||
BundlePlatform: 'win64'
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: windows-dependencies.yml
|
||||
- template: windows-dependency-zip.yml
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- template: windows-dependency-nsis.yml
|
||||
- template: windows-build.yml
|
||||
parameters:
|
||||
BuildPlatform: $(BuildPlatform)
|
||||
BuildConfiguration: Release
|
||||
- bash: |
|
||||
set -ex
|
||||
make -f Makefile.msvc bundle_pdb bundle_zip PLATFORM=$(BundlePlatform) BUNDLE_NAME=openttd-$(Build.BuildNumber)-windows-$(BundlePlatform)
|
||||
displayName: 'Create bundles'
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- bash: |
|
||||
set -ex
|
||||
# NSIS will be part of the Hosted image in the next update. Till then, we set the PATH ourself
|
||||
export PATH="${PATH}:/c/Program Files (x86)/NSIS"
|
||||
make -f Makefile.msvc bundle_exe PLATFORM=$(BundlePlatform) BUNDLE_NAME=openttd-$(Build.BuildNumber)-windows-$(BundlePlatform)
|
||||
displayName: 'Create installer bundle'
|
||||
- template: release-bundles.yml
|
||||
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- job: linux_stable
|
||||
displayName: 'Linux'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn: source
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
linux-ubuntu-xenial-i386-gcc:
|
||||
Tag: 'linux-ubuntu-xenial-i386-gcc'
|
||||
linux-ubuntu-xenial-amd64-gcc:
|
||||
Tag: 'linux-ubuntu-xenial-amd64-gcc'
|
||||
linux-ubuntu-bionic-i386-gcc:
|
||||
Tag: 'linux-ubuntu-bionic-i386-gcc'
|
||||
linux-ubuntu-bionic-amd64-gcc:
|
||||
Tag: 'linux-ubuntu-bionic-amd64-gcc'
|
||||
linux-ubuntu-focal-amd64-gcc:
|
||||
Tag: 'linux-ubuntu-focal-amd64-gcc'
|
||||
linux-debian-stretch-i386-gcc:
|
||||
Tag: 'linux-debian-stretch-i386-gcc'
|
||||
linux-debian-stretch-amd64-gcc:
|
||||
Tag: 'linux-debian-stretch-amd64-gcc'
|
||||
linux-debian-buster-i386-gcc:
|
||||
Tag: 'linux-debian-buster-i386-gcc'
|
||||
linux-debian-buster-amd64-gcc:
|
||||
Tag: 'linux-debian-buster-amd64-gcc'
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: linux-build.yml
|
||||
parameters:
|
||||
Image: compile-farm
|
||||
ContainerCommand: '$(Build.BuildNumber)'
|
||||
Tag: $(Tag)
|
||||
- template: linux-claim-bundles.yml
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: macos
|
||||
displayName: 'MacOS'
|
||||
pool:
|
||||
vmImage: 'macOS-10.14'
|
||||
dependsOn: source
|
||||
|
||||
variables:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.9
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: osx-dependencies.yml
|
||||
- template: osx-build.yml
|
||||
- script: 'make bundle_zip bundle_dmg BUNDLE_NAME=openttd-$(Build.BuildNumber)-macosx'
|
||||
displayName: 'Create bundles'
|
||||
- template: release-bundles.yml
|
||||
|
||||
- job: manifest
|
||||
displayName: 'Manifest'
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
dependsOn:
|
||||
- source
|
||||
- docs
|
||||
- windows
|
||||
- ${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
- linux_stable
|
||||
- macos
|
||||
# "Skipped" is not a status, and is not succeeded. So it seems to be
|
||||
# considered failed. So we trigger if all the earlier jobs are done (which
|
||||
# might be succeeded, failed, or skipped), and run this job. This is not
|
||||
# optimal, but given the rules, it is the only way to get this to work (as
|
||||
# some jobs might be skipped).
|
||||
condition: succeededOrFailed()
|
||||
|
||||
steps:
|
||||
- template: release-fetch-source.yml
|
||||
- template: release-manifest.yml
|
||||
${{ if eq(parameters.IsStableRelease, true) }}:
|
||||
parameters:
|
||||
IsStableRelease: true
|
||||
- template: release-bundles.yml
|
||||
parameters:
|
||||
CalculateChecksums: false
|
11
azure-pipelines/templates/windows-build.yml
Normal file
@@ -0,0 +1,11 @@
parameters:
  BuildPlatform: ''

steps:
- task: VSBuild@1
  displayName: 'Build'
  inputs:
    solution: 'projects/openttd_vs141.sln'
    platform: ${{ parameters.BuildPlatform }}
    configuration: ${{ parameters.BuildConfiguration }}
    maximumCpuCount: true
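The VSBuild@1 task is essentially a wrapper around MSBuild. A rough command-line equivalent of this step, with the template's parameters filled in for a 32-bit release build, would be (a sketch, not the task's exact invocation):

    # Approximate MSBuild equivalent of the VSBuild@1 step above.
    msbuild.exe projects/openttd_vs141.sln -p:Platform=Win32 -p:Configuration=Release -maxCpuCount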
14
azure-pipelines/templates/windows-dependencies.yml
Normal file
@@ -0,0 +1,14 @@
steps:
- bash: |
    set -ex

    curl -L https://github.com/OpenTTD/CompileFarm/releases/download/latest/windows-dependencies.zip > windows-dependencies.zip
    unzip windows-dependencies.zip
    rm -f windows-dependencies.zip

    mv windows-dependencies/installed /c/vcpkg/
    rm -rf windows-dependencies
  displayName: 'Install dependencies'
  workingDirectory: $(Build.ArtifactStagingDirectory)
- script: c:\vcpkg\vcpkg.exe integrate install
  displayName: 'Integrate vcpkg'
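The downloaded archive is a prebuilt vcpkg installed/ tree; moving it into C:\vcpkg and running `vcpkg integrate install` makes those libraries visible to MSBuild. To see what the tree actually provides, one can list it (from git-bash, as in the step above):

    # Inspect the prebuilt dependency tree after integration.
    /c/vcpkg/vcpkg.exe list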
26
azure-pipelines/templates/windows-dependency-nsis.yml
Normal file
@@ -0,0 +1,26 @@
parameters:
  condition: true

steps:
- bash: |
    set -ex

    mkdir nsis-plugin; cd nsis-plugin
    curl -L https://devs.openttd.org/~truebrain/nsis-plugins/Nsis7z.zip > Nsis7z.zip
    unzip Nsis7z.zip
    cp -R Plugins/* "/c/Program Files (x86)/NSIS/Plugins/"
    cd ..; rm -rf nsis-plugin

    mkdir nsis-plugin; cd nsis-plugin
    curl -L https://devs.openttd.org/~truebrain/nsis-plugins/NsisGetVersion.zip > NsisGetVersion.zip
    unzip NsisGetVersion.zip
    cp -R Plugins/* "/c/Program Files (x86)/NSIS/Plugins/x86-ansi/"
    cd ..; rm -rf nsis-plugin

    mkdir nsis-plugin; cd nsis-plugin
    curl -L https://devs.openttd.org/~truebrain/nsis-plugins/NsisFindProc.zip > NsisFindProc.zip
    unzip NsisFindProc.zip
    cp -R *.dll "/c/Program Files (x86)/NSIS/Plugins/x86-ansi/"
    cd ..; rm -rf nsis-plugin
  displayName: 'Install NSIS with the 7z, GetVersion, and FindProc plugins'
  condition: and(succeeded(), ${{ parameters.condition }})
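The three plugin installs repeat the same download/unzip/copy dance. A sketch of how that could be expressed once (the helper function is hypothetical, not part of the template):

    # Hypothetical helper factoring out the repeated pattern above.
    install_nsis_plugin() {
        url="$1"; dest="$2"; pattern="$3"
        mkdir nsis-plugin && cd nsis-plugin
        curl -L "$url" > plugin.zip
        unzip plugin.zip
        cp -R $pattern "$dest"
        cd .. && rm -rf nsis-plugin
    }
    install_nsis_plugin "https://devs.openttd.org/~truebrain/nsis-plugins/Nsis7z.zip" "/c/Program Files (x86)/NSIS/Plugins/" 'Plugins/*'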
5
azure-pipelines/templates/windows-dependency-zip.yml
Normal file
@@ -0,0 +1,5 @@
steps:
- bash: |
    set -ex
    choco install zip
  displayName: 'Install zip'
@@ -1,2 +0,0 @@
|
||||
add_subdirectory(ai)
|
||||
add_subdirectory(game)
|
@@ -1,40 +0,0 @@
|
||||
set(AI_COMPAT_SOURCE_FILES
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_0.7.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.0.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.1.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.2.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.3.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.4.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.5.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.6.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.7.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.8.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.9.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.10.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_1.11.nut
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/compat_12.nut
|
||||
)
|
||||
|
||||
foreach(AI_COMPAT_SOURCE_FILE IN LISTS AI_COMPAT_SOURCE_FILES)
|
||||
string(REPLACE "${CMAKE_SOURCE_DIR}/bin/" "" AI_COMPAT_SOURCE_FILE_NAME "${AI_COMPAT_SOURCE_FILE}")
|
||||
string(CONCAT AI_COMPAT_BINARY_FILE "${CMAKE_BINARY_DIR}/" "${AI_COMPAT_SOURCE_FILE_NAME}")
|
||||
|
||||
add_custom_command(OUTPUT ${AI_COMPAT_BINARY_FILE}
|
||||
COMMAND ${CMAKE_COMMAND} -E copy
|
||||
${AI_COMPAT_SOURCE_FILE}
|
||||
${AI_COMPAT_BINARY_FILE}
|
||||
MAIN_DEPENDENCY ${AI_COMPAT_SOURCE_FILE}
|
||||
COMMENT "Copying ${AI_COMPAT_SOURCE_FILE_NAME}"
|
||||
)
|
||||
|
||||
list(APPEND AI_COMPAT_BINARY_FILES ${AI_COMPAT_BINARY_FILE})
|
||||
endforeach()
|
||||
|
||||
# Create a new target which copies all compat files
|
||||
add_custom_target(ai_compat_files
|
||||
DEPENDS ${AI_COMPAT_BINARY_FILES}
|
||||
)
|
||||
|
||||
add_dependencies(openttd
|
||||
ai_compat_files
|
||||
)
|
@@ -4,5 +4,3 @@
|
||||
* OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
* See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
AILog.Info("1.10 API compatibility in effect.");
|
||||
|
@@ -1,8 +0,0 @@
|
||||
/*
|
||||
* This file is part of OpenTTD.
|
||||
* OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
* OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
* See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
AILog.Info("1.11 API compatibility in effect.");
|
@@ -1,6 +0,0 @@
|
||||
/*
|
||||
* This file is part of OpenTTD.
|
||||
* OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
* OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
* See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
67
bin/ai/regression/completeness.sh
Executable file
@@ -0,0 +1,67 @@
|
||||
#!/bin/sh
|
||||
|
||||
if ! [ -f ai/regression/completeness.sh ]; then
|
||||
echo "Make sure you are in the root of OpenTTD before starting this script."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cat ai/regression/tst_*/main.nut | tr ';' '\n' | awk '
|
||||
/^function/ {
|
||||
for (local in locals) {
|
||||
delete locals[local]
|
||||
}
|
||||
if (match($0, "function Regression::Start") || match($0, "function Regression::Stop")) next
|
||||
locals["this"] = "AIControllerSquirrel"
|
||||
}
|
||||
|
||||
/local/ {
|
||||
gsub(".*local", "local")
|
||||
if (match($4, "^AI")) {
|
||||
sub("\\(.*", "", $4)
|
||||
locals[$2] = $4
|
||||
}
|
||||
}
|
||||
|
||||
/Valuate/ {
|
||||
gsub(".*Valuate\\(", "")
|
||||
gsub("\\).*", "")
|
||||
gsub(",.*", "")
|
||||
gsub("\\.", "::")
|
||||
print $0
|
||||
}
|
||||
|
||||
/\./ {
|
||||
for (local in locals) {
|
||||
if (match($0, local ".")) {
|
||||
fname = substr($0, index($0, local "."))
|
||||
sub("\\(.*", "", fname)
|
||||
sub("\\.", "::", fname)
|
||||
sub(local, locals[local], fname)
|
||||
print fname
|
||||
if (match(locals[local], "List")) {
|
||||
sub(locals[local], "AIAbstractList", fname)
|
||||
print fname
|
||||
}
|
||||
}
|
||||
}
|
||||
# We want to remove everything before the FIRST occurrence of AI.
|
||||
# If we do not remove any other occurrences of AI from the string
|
||||
# we will remove everything before the LAST occurrence of AI, so
|
||||
# do some little magic to make it work the way we want.
|
||||
sub("AI", "AXXXXY")
|
||||
gsub("AI", "AXXXXX")
|
||||
sub(".*AXXXXY", "AI")
|
||||
if (match($0, "^AI") && match($0, ".")) {
|
||||
sub("\\(.*", "", $0)
|
||||
sub("\\.", "::", $0)
|
||||
print $0
|
||||
}
|
||||
}
|
||||
' | sed 's/ //g' | sort | uniq > tmp.in_regression
|
||||
|
||||
grep 'DefSQ.*Method' ../src/script/api/ai/*.hpp.sq | grep -v 'AIError::' | grep -v 'AIAbstractList::Valuate' | grep -v '::GetClassName' | sed 's/^[^,]*, &//g;s/,[^,]*//g' | sort > tmp.in_api
|
||||
|
||||
diff -u tmp.in_regression tmp.in_api | grep -v '^+++' | grep '^+' | sed 's/^+//'
|
||||
|
||||
rm -f tmp.in_regression tmp.in_api
|
||||
|
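completeness.sh cross-references the AI API methods exercised by the regression tests (tst_*/main.nut) with the methods exported in src/script/api/ai/*.hpp.sq, and prints those that no test ever calls. It has to be started from the directory that contains ai/regression, i.e. the bin/ directory of a checkout:

    # Typical invocation; the output is the list of uncovered AI API methods.
    cd bin
    ./ai/regression/completeness.sh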
@@ -4,10 +4,9 @@ class Regression extends AIInfo {
|
||||
function GetShortName() { return "REGR"; }
|
||||
function GetDescription() { return "This runs regression-tests on some commands. On the same map the result should always be the same."; }
|
||||
function GetVersion() { return 1; }
|
||||
function GetAPIVersion() { return "12"; }
|
||||
function GetAPIVersion() { return "1.10"; }
|
||||
function GetDate() { return "2007-03-18"; }
|
||||
function CreateInstance() { return "Regression"; }
|
||||
function UseAsRandomAI() { return false; }
|
||||
}
|
||||
|
||||
RegisterAI(Regression());
|
69
bin/ai/regression/run.sh
Executable file
@@ -0,0 +1,69 @@
|
||||
#!/bin/sh
|
||||
|
||||
if ! [ -f ai/regression/run.sh ]; then
|
||||
echo "Make sure you are in the root of OpenTTD before starting this script."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -f scripts/game_start.scr ]; then
|
||||
mv scripts/game_start.scr scripts/game_start.scr.regression
|
||||
fi
|
||||
|
||||
params=""
|
||||
gdb=""
|
||||
if [ "$1" != "-r" ]; then
|
||||
params="-snull -mnull -vnull:ticks=30000"
|
||||
fi
|
||||
if [ "$1" = "-g" ]; then
|
||||
gdb="gdb --ex run --args "
|
||||
fi
|
||||
|
||||
if [ -d "ai/regression/tst_$1" ]; then
|
||||
tests="ai/regression/tst_$1"
|
||||
elif [ -d "ai/regression/tst_$2" ]; then
|
||||
tests="ai/regression/tst_$2"
|
||||
else
|
||||
tests=ai/regression/tst_*
|
||||
fi
|
||||
|
||||
ret=0
|
||||
for tst in $tests; do
|
||||
echo -n "Running $tst... "
|
||||
|
||||
# Make sure that only one info.nut is present for each test run. Otherwise openttd gets confused.
|
||||
cp ai/regression/regression_info.nut $tst/info.nut
|
||||
|
||||
sav=$tst/test.sav
|
||||
if ! [ -f $sav ]; then
|
||||
sav=ai/regression/empty.sav
|
||||
fi
|
||||
|
||||
if [ -n "$gdb" ]; then
|
||||
$gdb ./openttd -x -c ai/regression/regression.cfg $params -g $sav
|
||||
else
|
||||
./openttd -x -c ai/regression/regression.cfg $params -g $sav -d script=2 -d misc=9 2>&1 | awk '{ gsub("0x(\\(nil\\)|0+)(x0)?", "0x00000000", $0); gsub("^dbg: \\[script\\]", "", $0); gsub("^ ", "ERROR: ", $0); gsub("ERROR: \\[1\\] ", "", $0); gsub("\\[P\\] ", "", $0); print $0; }' | grep -v '^dbg: \[.*\]' > $tst/tmp.regression
|
||||
fi
|
||||
|
||||
if [ -z "$gdb" ]; then
|
||||
res="`diff -ub $tst/result.txt $tst/tmp.regression`"
|
||||
if [ -z "$res" ]; then
|
||||
echo "passed!"
|
||||
else
|
||||
echo "failed! Difference:"
|
||||
echo "$res"
|
||||
ret=1
|
||||
fi
|
||||
fi
|
||||
|
||||
rm $tst/info.nut
|
||||
|
||||
if [ "$1" != "-k" ]; then
|
||||
rm -f $tst/tmp.regression
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -f scripts/game_start.scr.regression ]; then
|
||||
mv scripts/game_start.scr.regression scripts/game_start.scr
|
||||
fi
|
||||
|
||||
exit $ret
|
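run.sh drives every ai/regression/tst_* savegame through a headless (null video/sound) OpenTTD, normalises the script debug output, and diffs it against the test's result.txt. Based on the argument handling above, the usual invocation forms are (run from the bin/ directory; <name> stands for an existing tst_<name> directory):

    # Run all regression tests headless.
    ./ai/regression/run.sh
    # Run a single test, if ai/regression/tst_<name> exists.
    ./ai/regression/run.sh <name>
    # Keep each test's tmp.regression for inspection instead of deleting it.
    ./ai/regression/run.sh -k
    # Run a single test under gdb (no output diffing in this mode).
    ./ai/regression/run.sh -g <name>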
152
bin/ai/regression/run.vbs
Normal file
@@ -0,0 +1,152 @@
|
||||
Option Explicit
|
||||
|
||||
' This file is part of OpenTTD.
|
||||
' OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
|
||||
' OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
' See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Dim FSO
|
||||
Set FSO = CreateObject("Scripting.FileSystemObject")
|
||||
|
||||
Function GetTestList()
|
||||
Dim retests, i, tests, dir
|
||||
Set retests = New RegExp
|
||||
Set GetTestList = CreateObject("Scripting.Dictionary")
|
||||
|
||||
retests.Pattern = "ai/regression/tst_*"
|
||||
retests.Global = True
|
||||
For i = 0 To WScript.Arguments.Count - 1
|
||||
Dim test
|
||||
test = "ai/regression/tst_" & WScript.Arguments.Item(i)
|
||||
If FSO.FolderExists(test) Then
|
||||
retests.Pattern = test
|
||||
Exit For
|
||||
End If
|
||||
Next
|
||||
|
||||
For Each dir In FSO.GetFolder("ai/regression/").SubFolders
|
||||
Dim name
|
||||
name = "ai/regression/" & dir.Name
|
||||
If retests.Test(name) Then
|
||||
GetTestList.Add name, name
|
||||
End If
|
||||
Next
|
||||
End Function
|
||||
|
||||
Function GetParams()
|
||||
GetParams = "-snull -mnull -vnull:ticks=30000"
|
||||
If WScript.Arguments.Count = 0 Then Exit Function
|
||||
If WScript.Arguments.Item(0) <> "-r" Then Exit Function
|
||||
GetParams = ""
|
||||
End Function
|
||||
|
||||
Sub FilterFile(filename)
|
||||
Dim lines, filter, file
|
||||
|
||||
Set file = FSO.OpenTextFile(filename, 1)
|
||||
If Not file.AtEndOfStream Then
|
||||
lines = file.ReadAll
|
||||
End If
|
||||
file.Close
|
||||
|
||||
Set filter = New RegExp
|
||||
filter.Global = True
|
||||
filter.Multiline = True
|
||||
filter.Pattern = "0x(\(nil\)|0+)(x0)?"
|
||||
lines = filter.Replace(lines, "0x00000000")
|
||||
filter.Pattern = "^dbg: \[script\]"
|
||||
lines = filter.Replace(lines, "")
|
||||
filter.Pattern = "^ "
|
||||
lines = filter.Replace(lines, "ERROR: ")
|
||||
filter.Pattern = "ERROR: \[1\] \[P\] "
|
||||
lines = filter.Replace(lines, "")
|
||||
filter.Pattern = "^dbg: .*\r\n"
|
||||
lines = filter.Replace(lines, "")
|
||||
|
||||
Set file = FSO.OpenTextFile(filename, 2)
|
||||
file.Write lines
|
||||
file.Close
|
||||
End Sub
|
||||
|
||||
Function CompareFiles(filename1, filename2)
|
||||
Dim file, lines1, lines2
|
||||
Set file = FSO.OpenTextFile(filename1, 1)
|
||||
If Not file.AtEndOfStream Then
|
||||
lines1 = file.ReadAll
|
||||
End IF
|
||||
file.Close
|
||||
Set file = FSO.OpenTextFile(filename2, 1)
|
||||
If Not file.AtEndOfStream Then
|
||||
lines2 = file.ReadAll
|
||||
End IF
|
||||
file.Close
|
||||
CompareFiles = (lines1 = lines2)
|
||||
End Function
|
||||
|
||||
Function RunTest(test, params, ret)
|
||||
Dim WshShell, oExec, sav, command
|
||||
Set WshShell = CreateObject("WScript.Shell")
|
||||
|
||||
' Make sure that only one info.nut is present for each test run. Otherwise openttd gets confused.
|
||||
FSO.CopyFile "ai/regression/regression_info.nut", test & "/info.nut"
|
||||
|
||||
sav = test & "/test.sav"
|
||||
If Not FSO.FileExists(sav) Then
|
||||
sav = "ai/regression/empty.sav"
|
||||
End If
|
||||
|
||||
command = ".\openttd -x -c ai/regression/regression.cfg " & params & " -g " & sav & " -d script=2 -d misc=9"
|
||||
' 2>&1 must be after >tmp.regression, else stderr is not redirected to the file
|
||||
WshShell.Run "cmd /c " & command & " >"& test & "/tmp.regression 2>&1", 0, True
|
||||
|
||||
FilterFile test & "/tmp.regression"
|
||||
|
||||
If CompareFiles(test & "/result.txt", test & "/tmp.regression") Then
|
||||
RunTest = "passed!"
|
||||
Else
|
||||
RunTest = "failed!"
|
||||
ret = 1
|
||||
End If
|
||||
|
||||
FSO.DeleteFile test & "/info.nut"
|
||||
|
||||
If WScript.Arguments.Count > 0 Then
|
||||
If WScript.Arguments.Item(0) = "-k" Then
|
||||
Exit Function
|
||||
End If
|
||||
End If
|
||||
|
||||
FSO.DeleteFile test & "/tmp.regression"
|
||||
End Function
|
||||
|
||||
On Error Resume Next
|
||||
WScript.StdOut.WriteLine ""
|
||||
If Err.Number <> 0 Then
|
||||
WScript.Echo "This script must be started with cscript."
|
||||
WScript.Quit 1
|
||||
End If
|
||||
On Error Goto 0
|
||||
|
||||
If Not FSO.FileExists("ai/regression/run.vbs") Then
|
||||
WScript.Echo "Make sure you are in the root of OpenTTD before starting this script."
|
||||
WScript.Quit 1
|
||||
End If
|
||||
|
||||
If FSO.FileExists("scripts/game_start.scr") Then
|
||||
FSO.MoveFile "scripts/game_start.scr", "scripts/game_start.scr.regression"
|
||||
End If
|
||||
|
||||
Dim params, test, ret
|
||||
params = GetParams()
|
||||
ret = 0
|
||||
|
||||
For Each test in GetTestList()
|
||||
WScript.StdOut.Write "Running " & test & "... "
|
||||
WScript.StdOut.WriteLine RunTest(test, params, ret)
|
||||
Next
|
||||
|
||||
If FSO.FileExists("scripts/game_start.scr.regression") Then
|
||||
FSO.MoveFile "scripts/game_start.scr.regression", "scripts/game_start.scr"
|
||||
End If
|
||||
|
||||
WScript.Quit ret
|
@@ -332,8 +332,7 @@ function Regression::Cargo()
|
||||
for (local i = -1; i < 15; i++) {
|
||||
print(" Cargo " + i);
|
||||
print(" IsValidCargo(): " + AICargo.IsValidCargo(i));
|
||||
print(" GetName(): '" + AICargo.GetName(i) + "'");
|
||||
print(" GetCargoLabel(): '" + AICargo.GetCargoLabel(i) + "'");
|
||||
print(" GetCargoLabel(): '" + AICargo.GetCargoLabel(i)+ "'");
|
||||
print(" IsFreight(): " + AICargo.IsFreight(i));
|
||||
print(" HasCargoClass(): " + AICargo.HasCargoClass(i, AICargo.CC_PASSENGERS));
|
||||
print(" GetTownEffect(): " + AICargo.GetTownEffect(i));
|
||||
@@ -427,31 +426,30 @@ function Regression::Company()
|
||||
print(" GetCompanyHQ(): " + AICompany.GetCompanyHQ(AICompany.COMPANY_SELF));
|
||||
print(" BuildCompanyHQ(): " + AICompany.BuildCompanyHQ(AIMap.GetTileIndex(129, 129)));
|
||||
print(" GetCompanyHQ(): " + AICompany.GetCompanyHQ(AICompany.COMPANY_SELF));
|
||||
print(" BuildCompanyHQ(): " + AICompany.BuildCompanyHQ(AIMap.GetTileIndex(239, 76)));
|
||||
print(" BuildCompanyHQ(): " + AICompany.BuildCompanyHQ(AIMap.GetTileIndex(129, 128)));
|
||||
print(" GetLastErrorString(): " + AIError.GetLastErrorString());
|
||||
print(" GetAutoRenewStatus(): " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewStatus(true): " + AICompany.SetAutoRenewStatus(true));
|
||||
print(" GetAutoRenewStatus(): " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewStatus(true): " + AICompany.SetAutoRenewStatus(true));
|
||||
print(" SetAutoRenewStatus(false): " + AICompany.SetAutoRenewStatus(false));
|
||||
print(" GetAutoRenewStatus(): " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
|
||||
print(" GetAutoRenewMonths(): " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMonths(-12): " + AICompany.SetAutoRenewMonths(-12));
|
||||
print(" GetAutoRenewMonths(): " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMonths(-12): " + AICompany.SetAutoRenewMonths(-12));
|
||||
print(" SetAutoRenewMonths(6): " + AICompany.SetAutoRenewMonths(6));
|
||||
print(" GetAutoRenewMoney(): " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMoney(200000): " + AICompany.SetAutoRenewMoney(200000));
|
||||
print(" GetAutoRenewMoney(): " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMoney(200000): " + AICompany.SetAutoRenewMoney(200000));
|
||||
print(" SetAutoRenewMoney(100000): " + AICompany.SetAutoRenewMoney(100000));
|
||||
print(" GetAutoRenewStatus(); " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewStatus(true); " + AICompany.SetAutoRenewStatus(true));
|
||||
print(" GetAutoRenewStatus(); " + AICompany.GetAutoRenewStatus(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewStatus(true); " + AICompany.SetAutoRenewStatus(true));
|
||||
print(" SetAutoRenewStatus(false); " + AICompany.SetAutoRenewStatus(false));
|
||||
print(" GetAutoRenewMonths(); " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMonths(-12); " + AICompany.SetAutoRenewMonths(-12));
|
||||
print(" GetAutoRenewMonths(); " + AICompany.GetAutoRenewMonths(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMonths(-12); " + AICompany.SetAutoRenewMonths(-12));
|
||||
print(" SetAutoRenewMonths(6); " + AICompany.SetAutoRenewMonths(6));
|
||||
print(" GetAutoRenewMoney(); " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMoney(200000); " + AICompany.SetAutoRenewMoney(200000));
|
||||
print(" GetAutoRenewMoney(); " + AICompany.GetAutoRenewMoney(AICompany.COMPANY_SELF));
|
||||
print(" SetAutoRenewMoney(200000); " + AICompany.SetAutoRenewMoney(200000));
|
||||
print(" SetAutoRenewMoney(100000); " + AICompany.SetAutoRenewMoney(100000));
|
||||
for (local i = -1; i <= AICompany.EARLIEST_QUARTER; i++) {
|
||||
print(" Quarter: " + i);
|
||||
print(" GetQuarterlyIncome(): " + AICompany.GetQuarterlyIncome(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyExpenses(): " + AICompany.GetQuarterlyExpenses(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyCargoDelivered(): " + AICompany.GetQuarterlyCargoDelivered(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyPerformanceRating(): " + AICompany.GetQuarterlyPerformanceRating(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyCompanyValue(): " + AICompany.GetQuarterlyCompanyValue(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyIncome(); " + AICompany.GetQuarterlyIncome(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyExpenses(); " + AICompany.GetQuarterlyExpenses(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyCargoDelivered(); " + AICompany.GetQuarterlyCargoDelivered(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyPerformanceRating(); " + AICompany.GetQuarterlyPerformanceRating(AICompany.COMPANY_SELF, i));
|
||||
print(" GetQuarterlyCompanyValue(); " + AICompany.GetQuarterlyCompanyValue(AICompany.COMPANY_SELF, i));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -547,8 +545,6 @@ function Regression::Prices()
|
||||
print(" BT_DOCK: " + AIMarine.GetBuildCost(AIMarine.BT_DOCK));
|
||||
print(" BT_DEPOT: " + AIMarine.GetBuildCost(AIMarine.BT_DEPOT));
|
||||
print(" BT_BUOY: " + AIMarine.GetBuildCost(AIMarine.BT_BUOY));
|
||||
print(" BT_LOCK: " + AIMarine.GetBuildCost(AIMarine.BT_LOCK));
|
||||
print(" BT_CANAL: " + AIMarine.GetBuildCost(AIMarine.BT_CANAL));
|
||||
print(" -Tile-");
|
||||
print(" BT_FOUNDATION: " + AITile.GetBuildCost(AITile.BT_FOUNDATION));
|
||||
print(" BT_TERRAFORM: " + AITile.GetBuildCost(AITile.BT_TERRAFORM));
|
||||
@@ -558,7 +554,6 @@ function Regression::Prices()
|
||||
print(" BT_CLEAR_ROCKY: " + AITile.GetBuildCost(AITile.BT_CLEAR_ROCKY));
|
||||
print(" BT_CLEAR_FIELDS: " + AITile.GetBuildCost(AITile.BT_CLEAR_FIELDS));
|
||||
print(" BT_CLEAR_HOUSE: " + AITile.GetBuildCost(AITile.BT_CLEAR_HOUSE));
|
||||
print(" BT_CLEAR_WATER: " + AITile.GetBuildCost(AITile.BT_CLEAR_WATER));
|
||||
}
|
||||
|
||||
function cost_callback(old_path, new_tile, new_direction, self) { if (old_path == null) return 0; return old_path.GetCost() + 1; }
|
||||
@@ -922,9 +917,6 @@ function Regression::Marine()
|
||||
|
||||
print(" BuildWaterDepot(): " + AIMarine.BuildWaterDepot(28479, 28480));
|
||||
print(" BuildDock(): " + AIMarine.BuildDock(29253, AIStation.STATION_JOIN_ADJACENT));
|
||||
print(" BuildBuoy(): " + AIMarine.BuildBuoy(28481));
|
||||
print(" BuildLock(): " + AIMarine.BuildLock(28487));
|
||||
print(" BuildCanal(): " + AIMarine.BuildCanal(28744));
|
||||
}
|
||||
|
||||
function Regression::Order()
|
||||
@@ -1476,41 +1468,9 @@ function Regression::TileList()
|
||||
print(" " + i + " => " + list.GetValue(i));
|
||||
}
|
||||
|
||||
list.AddRectangle(0x6F3F, 0x7248);
|
||||
list.AddRectangle(54421 - 256 * 2, 256 * 2 + 54421 + 8);
|
||||
list.Valuate(AITile.IsWaterTile);
|
||||
print(" IsWaterTile(): done");
|
||||
print(" Count(): " + list.Count());
|
||||
print(" ListDump:");
|
||||
for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
|
||||
print(" " + i + " => " + list.GetValue(i));
|
||||
}
|
||||
|
||||
list.Valuate(AITile.IsSeaTile);
|
||||
print(" IsSeaTile(): done");
|
||||
print(" Count(): " + list.Count());
|
||||
print(" ListDump:");
|
||||
for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
|
||||
print(" " + i + " => " + list.GetValue(i));
|
||||
}
|
||||
|
||||
list.Valuate(AITile.IsRiverTile);
|
||||
print(" IsRiverTile() done");
|
||||
print(" Count(): " + list.Count());
|
||||
print(" ListDump:");
|
||||
for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
|
||||
print(" " + i + " => " + list.GetValue(i));
|
||||
}
|
||||
|
||||
list.Valuate(AIMarine.IsCanalTile);
|
||||
print(" IsCanalTile() done");
|
||||
print(" Count(): " + list.Count());
|
||||
print(" ListDump:");
|
||||
for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
|
||||
print(" " + i + " => " + list.GetValue(i));
|
||||
}
|
||||
|
||||
list.Valuate(AITile.IsCoastTile);
|
||||
print(" IsCoastTile() done");
|
||||
print(" Water(): done");
|
||||
print(" Count(): " + list.Count());
|
||||
print(" ListDump:");
|
||||
for (local i = list.Begin(); !list.IsEnd(); i = list.Next()) {
|
@@ -594,7 +594,7 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetBankBalance(): 100000
|
||||
GetName(): (null : 0x00000000)
|
||||
GetLoanAmount(): 100000
|
||||
GetMaxLoanAmount(): 2000000000
|
||||
GetMaxLoanAmount(): 500000
|
||||
GetLoanInterval(): 10000
|
||||
SetLoanAmount(1): false
|
||||
SetLoanAmount(100): false
|
||||
@@ -606,8 +606,8 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetBankBalance(): 40000
|
||||
GetLoanAmount(): 40000
|
||||
SetLoanAmount(10000): true
|
||||
GetBankBalance(): 2000000000
|
||||
GetLoanAmount(): 2000000000
|
||||
GetBankBalance(): 500000
|
||||
GetLoanAmount(): 500000
|
||||
GetCompanyHQ(): -1
|
||||
BuildCompanyHQ(): true
|
||||
GetCompanyHQ(): 33151
|
||||
@@ -615,178 +615,177 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetCompanyHQ(): 33153
|
||||
BuildCompanyHQ(): false
|
||||
GetLastErrorString(): ERR_AREA_NOT_CLEAR
|
||||
GetAutoRenewStatus(): true
|
||||
SetAutoRenewStatus(true): true
|
||||
GetAutoRenewStatus(): true
|
||||
SetAutoRenewStatus(true): true
|
||||
SetAutoRenewStatus(false): true
|
||||
GetAutoRenewStatus(): false
|
||||
GetAutoRenewMonths(): 6
|
||||
SetAutoRenewMonths(-12): true
|
||||
GetAutoRenewMonths(): -12
|
||||
SetAutoRenewMonths(-12): true
|
||||
SetAutoRenewMonths(6): true
|
||||
GetAutoRenewMoney(): 100000
|
||||
SetAutoRenewMoney(200000): true
|
||||
GetAutoRenewMoney(): 200000
|
||||
SetAutoRenewMoney(200000): true
|
||||
SetAutoRenewMoney(100000): true
|
||||
GetAutoRenewStatus(); false
|
||||
SetAutoRenewStatus(true); true
|
||||
GetAutoRenewStatus(); true
|
||||
SetAutoRenewStatus(true); true
|
||||
SetAutoRenewStatus(false); true
|
||||
GetAutoRenewMonths(); 6
|
||||
SetAutoRenewMonths(-12); true
|
||||
GetAutoRenewMonths(); -12
|
||||
SetAutoRenewMonths(-12); true
|
||||
SetAutoRenewMonths(6); true
|
||||
GetAutoRenewMoney(); 100000
|
||||
SetAutoRenewMoney(200000); true
|
||||
GetAutoRenewMoney(); 200000
|
||||
SetAutoRenewMoney(200000); true
|
||||
SetAutoRenewMoney(100000); true
|
||||
Quarter: -1
|
||||
GetQuarterlyIncome(): -1
|
||||
GetQuarterlyExpenses(): -1
|
||||
GetQuarterlyCargoDelivered(): -1
|
||||
GetQuarterlyPerformanceRating(): -1
|
||||
GetQuarterlyCompanyValue(): -1
|
||||
GetQuarterlyIncome(); -1
|
||||
GetQuarterlyExpenses(); -1
|
||||
GetQuarterlyCargoDelivered(); -1
|
||||
GetQuarterlyPerformanceRating(); -1
|
||||
GetQuarterlyCompanyValue(); -1
|
||||
Quarter: 0
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): -210
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): -1
|
||||
GetQuarterlyCompanyValue(): 1
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); -210
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); -1
|
||||
GetQuarterlyCompanyValue(); 1
|
||||
Quarter: 1
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 2
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 3
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 4
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 5
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 6
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 7
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 8
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 9
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 10
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 11
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 12
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 13
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 14
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 15
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 16
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 17
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 18
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 19
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 20
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 21
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 22
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 23
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
Quarter: 24
|
||||
GetQuarterlyIncome(): 0
|
||||
GetQuarterlyExpenses(): 0
|
||||
GetQuarterlyCargoDelivered(): 0
|
||||
GetQuarterlyPerformanceRating(): 0
|
||||
GetQuarterlyCompanyValue(): 0
|
||||
GetQuarterlyIncome(); 0
|
||||
GetQuarterlyExpenses(); 0
|
||||
GetQuarterlyCargoDelivered(); 0
|
||||
GetQuarterlyPerformanceRating(); 0
|
||||
GetQuarterlyCompanyValue(); 0
|
||||
|
||||
--AIAirport--
|
||||
IsHangarTile(): false
|
||||
@@ -848,7 +847,7 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetAirportWidth(9): -1
|
||||
GetAirportHeight(9): -1
|
||||
GetAirportCoverageRadius(9): -1
|
||||
GetBankBalance(): 1999999790
|
||||
GetBankBalance(): 499790
|
||||
GetPrice(): 5400
|
||||
BuildAirport(): true
|
||||
IsHangarTile(): false
|
||||
@@ -858,11 +857,11 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
IsHangarTile(): true
|
||||
IsAirportTile(): true
|
||||
GetAirportType(): 0
|
||||
GetBankBalance(): 1999989890
|
||||
GetBankBalance(): 489890
|
||||
RemoveAirport(): true
|
||||
IsHangarTile(): false
|
||||
IsAirportTile(): false
|
||||
GetBankBalance(): 1999989626
|
||||
GetBankBalance(): 489626
|
||||
BuildAirport(): true
|
||||
|
||||
--Bridge--
|
||||
@@ -1109,7 +1108,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
--AICargo--
|
||||
Cargo -1
|
||||
IsValidCargo(): false
|
||||
GetName(): '(null : 0x00000000)'
|
||||
GetCargoLabel(): '(null : 0x00000000)'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): false
|
||||
@@ -1121,7 +1119,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 0
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Passengers'
|
||||
GetCargoLabel(): 'PASS'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): true
|
||||
@@ -1133,7 +1130,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 0
|
||||
Cargo 1
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Coal'
|
||||
GetCargoLabel(): 'COAL'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1145,7 +1141,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 2
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Mail'
|
||||
GetCargoLabel(): 'MAIL'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): false
|
||||
@@ -1157,7 +1152,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 3
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Oil'
|
||||
GetCargoLabel(): 'OIL_'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1169,7 +1163,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 4
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Livestock'
|
||||
GetCargoLabel(): 'LVST'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1181,7 +1174,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 5
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Goods'
|
||||
GetCargoLabel(): 'GOOD'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1193,7 +1185,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 6
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Grain'
|
||||
GetCargoLabel(): 'GRAI'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1205,7 +1196,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 7
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Wood'
|
||||
GetCargoLabel(): 'WOOD'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1217,7 +1207,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 8
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Iron Ore'
|
||||
GetCargoLabel(): 'IORE'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1229,7 +1218,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 9
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Steel'
|
||||
GetCargoLabel(): 'STEL'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1241,7 +1229,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 10
|
||||
IsValidCargo(): true
|
||||
GetName(): 'Valuables'
|
||||
GetCargoLabel(): 'VALU'
|
||||
IsFreight(): true
|
||||
HasCargoClass(): false
|
||||
@@ -1253,7 +1240,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 11
|
||||
IsValidCargo(): false
|
||||
GetName(): '(null : 0x00000000)'
|
||||
GetCargoLabel(): '(null : 0x00000000)'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): false
|
||||
@@ -1265,7 +1251,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 12
|
||||
IsValidCargo(): false
|
||||
GetName(): '(null : 0x00000000)'
|
||||
GetCargoLabel(): '(null : 0x00000000)'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): false
|
||||
@@ -1277,7 +1262,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 13
|
||||
IsValidCargo(): false
|
||||
GetName(): '(null : 0x00000000)'
|
||||
GetCargoLabel(): '(null : 0x00000000)'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): false
|
||||
@@ -1289,7 +1273,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetRoadVehicleTypeForCargo(): 1
|
||||
Cargo 14
|
||||
IsValidCargo(): false
|
||||
GetName(): '(null : 0x00000000)'
|
||||
GetCargoLabel(): '(null : 0x00000000)'
|
||||
IsFreight(): false
|
||||
HasCargoClass(): false
|
||||
@@ -7330,7 +7313,7 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
IsBuoyTile(): false
|
||||
IsLockTile(): false
|
||||
IsCanalTile(): false
|
||||
GetBankBalance(): 1999979304
|
||||
GetBankBalance(): 479664
|
||||
BuildWaterDepot(): true
|
||||
BuildDock(): true
|
||||
BuildBuoy(): true
|
||||
@@ -7343,7 +7326,7 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
IsBuoyTile(): true
|
||||
IsLockTile(): true
|
||||
IsCanalTile(): true
|
||||
GetBankBalance(): 1999964680
|
||||
GetBankBalance(): 465070
|
||||
|
||||
--AIWaypointList(BUOY)--
|
||||
Count(): 1
|
||||
@@ -7362,12 +7345,9 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
IsBuoyTile(): false
|
||||
IsLockTile(): false
|
||||
IsCanalTile(): false
|
||||
GetBankBalance(): 1999959285
|
||||
GetBankBalance(): 459675
|
||||
BuildWaterDepot(): true
|
||||
BuildDock(): true
|
||||
BuildBuoy(): true
|
||||
BuildLock(): true
|
||||
BuildCanal(): true
|
||||
|
||||
--Prices--
|
||||
-Rail-
|
||||
@@ -7394,8 +7374,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
BT_DOCK: 262
|
||||
BT_DEPOT: 525
|
||||
BT_BUOY: 262
|
||||
BT_LOCK: 5625
|
||||
BT_CANAL: 3750
|
||||
-Tile-
|
||||
BT_FOUNDATION: 187
|
||||
BT_TERRAFORM: 187
|
||||
@@ -7405,7 +7383,6 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
BT_CLEAR_ROCKY: 150
|
||||
BT_CLEAR_FIELDS: 375
|
||||
BT_CLEAR_HOUSE: 1200
|
||||
BT_CLEAR_WATER: 7500
|
||||
|
||||
--Rail--
|
||||
IsRailTile(): false
|
||||
@@ -8456,221 +8433,54 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
KeepValue(1): done
|
||||
Count(): 0
|
||||
ListDump:
|
||||
IsWaterTile(): done
|
||||
Count(): 40
|
||||
Water(): done
|
||||
Count(): 45
|
||||
ListDump:
|
||||
29251 => 1
|
||||
29250 => 1
|
||||
29249 => 1
|
||||
29248 => 1
|
||||
29247 => 1
|
||||
28996 => 1
|
||||
28995 => 1
|
||||
28994 => 1
|
||||
28993 => 1
|
||||
28992 => 1
|
||||
28991 => 1
|
||||
28744 => 1
|
||||
28741 => 1
|
||||
28740 => 1
|
||||
28739 => 1
|
||||
28738 => 1
|
||||
28737 => 1
|
||||
28736 => 1
|
||||
28735 => 1
|
||||
28488 => 1
|
||||
28487 => 1
|
||||
28486 => 1
|
||||
28485 => 1
|
||||
28484 => 1
|
||||
28483 => 1
|
||||
28482 => 1
|
||||
28480 => 1
|
||||
28479 => 1
|
||||
29256 => 0
|
||||
29255 => 0
|
||||
29254 => 0
|
||||
29253 => 0
|
||||
29252 => 0
|
||||
29000 => 0
|
||||
28999 => 0
|
||||
28998 => 0
|
||||
28997 => 0
|
||||
28743 => 0
|
||||
28742 => 0
|
||||
28481 => 0
|
||||
IsSeaTile(): done
|
||||
Count(): 40
|
||||
ListDump:
|
||||
29251 => 1
|
||||
29250 => 1
|
||||
29249 => 1
|
||||
29248 => 1
|
||||
29247 => 1
|
||||
28996 => 1
|
||||
28995 => 1
|
||||
28994 => 1
|
||||
28993 => 1
|
||||
28992 => 1
|
||||
28991 => 1
|
||||
28741 => 1
|
||||
28740 => 1
|
||||
28739 => 1
|
||||
28738 => 1
|
||||
28737 => 1
|
||||
28736 => 1
|
||||
28735 => 1
|
||||
28485 => 1
|
||||
28484 => 1
|
||||
28483 => 1
|
||||
28482 => 1
|
||||
29256 => 0
|
||||
29255 => 0
|
||||
29254 => 0
|
||||
29253 => 0
|
||||
29252 => 0
|
||||
29000 => 0
|
||||
28999 => 0
|
||||
28998 => 0
|
||||
28997 => 0
|
||||
28744 => 0
|
||||
28743 => 0
|
||||
28742 => 0
|
||||
28488 => 0
|
||||
28487 => 0
|
||||
28486 => 0
|
||||
28481 => 0
|
||||
28480 => 0
|
||||
28479 => 0
|
||||
IsRiverTile() done
|
||||
Count(): 40
|
||||
ListDump:
|
||||
29256 => 0
|
||||
29255 => 0
|
||||
29254 => 0
|
||||
29253 => 0
|
||||
29252 => 0
|
||||
29251 => 0
|
||||
29250 => 0
|
||||
29249 => 0
|
||||
29248 => 0
|
||||
29247 => 0
|
||||
29000 => 0
|
||||
28999 => 0
|
||||
28998 => 0
|
||||
28997 => 0
|
||||
28996 => 0
|
||||
28995 => 0
|
||||
28994 => 0
|
||||
28993 => 0
|
||||
28992 => 0
|
||||
28991 => 0
|
||||
28744 => 0
|
||||
28743 => 0
|
||||
28742 => 0
|
||||
28741 => 0
|
||||
28740 => 0
|
||||
28739 => 0
|
||||
28738 => 0
|
||||
28737 => 0
|
||||
28736 => 0
|
||||
28735 => 0
|
||||
28488 => 0
|
||||
28487 => 0
|
||||
28486 => 0
|
||||
28485 => 0
|
||||
28484 => 0
|
||||
28483 => 0
|
||||
28482 => 0
|
||||
28481 => 0
|
||||
28480 => 0
|
||||
28479 => 0
|
||||
IsCanalTile() done
|
||||
Count(): 40
|
||||
ListDump:
|
||||
28744 => 1
|
||||
29256 => 0
|
||||
29255 => 0
|
||||
29254 => 0
|
||||
29253 => 0
|
||||
29252 => 0
|
||||
29251 => 0
|
||||
29250 => 0
|
||||
29249 => 0
|
||||
29248 => 0
|
||||
29247 => 0
|
||||
29000 => 0
|
||||
28999 => 0
|
||||
28998 => 0
|
||||
28997 => 0
|
||||
28996 => 0
|
||||
28995 => 0
|
||||
28994 => 0
|
||||
28993 => 0
|
||||
28992 => 0
|
||||
28991 => 0
|
||||
28743 => 0
|
||||
28742 => 0
|
||||
28741 => 0
|
||||
28740 => 0
|
||||
28739 => 0
|
||||
28738 => 0
|
||||
28737 => 0
|
||||
28736 => 0
|
||||
28735 => 0
|
||||
28488 => 0
|
||||
28487 => 0
|
||||
28486 => 0
|
||||
28485 => 0
|
||||
28484 => 0
|
||||
28483 => 0
|
||||
28482 => 0
|
||||
28481 => 0
|
||||
28480 => 0
|
||||
28479 => 0
|
||||
IsCoastTile() done
|
||||
Count(): 40
|
||||
ListDump:
|
||||
28998 => 1
|
||||
28997 => 1
|
||||
28743 => 1
|
||||
28742 => 1
|
||||
29256 => 0
|
||||
29255 => 0
|
||||
29254 => 0
|
||||
29253 => 0
|
||||
29252 => 0
|
||||
29251 => 0
|
||||
29250 => 0
|
||||
29249 => 0
|
||||
29248 => 0
|
||||
29247 => 0
|
||||
29000 => 0
|
||||
28999 => 0
|
||||
28996 => 0
|
||||
28995 => 0
|
||||
28994 => 0
|
||||
28993 => 0
|
||||
28992 => 0
|
||||
28991 => 0
|
||||
28744 => 0
|
||||
28741 => 0
|
||||
28740 => 0
|
||||
28739 => 0
|
||||
28738 => 0
|
||||
28737 => 0
|
||||
28736 => 0
|
||||
28735 => 0
|
||||
28488 => 0
|
||||
28487 => 0
|
||||
28486 => 0
|
||||
28485 => 0
|
||||
28484 => 0
|
||||
28483 => 0
|
||||
28482 => 0
|
||||
28481 => 0
|
||||
28480 => 0
|
||||
28479 => 0
|
||||
54941 => 1
|
||||
54940 => 1
|
||||
54939 => 1
|
||||
54938 => 1
|
||||
54937 => 1
|
||||
54936 => 1
|
||||
54935 => 1
|
||||
54934 => 1
|
||||
54933 => 1
|
||||
54685 => 1
|
||||
54684 => 1
|
||||
54683 => 1
|
||||
54682 => 1
|
||||
54681 => 1
|
||||
54680 => 1
|
||||
54679 => 1
|
||||
54678 => 1
|
||||
54677 => 1
|
||||
54429 => 1
|
||||
54428 => 1
|
||||
54427 => 1
|
||||
54426 => 1
|
||||
54425 => 1
|
||||
54424 => 1
|
||||
54423 => 1
|
||||
54422 => 1
|
||||
54421 => 1
|
||||
54173 => 1
|
||||
54172 => 1
|
||||
54171 => 1
|
||||
54170 => 1
|
||||
54169 => 1
|
||||
54168 => 0
|
||||
54167 => 0
|
||||
54166 => 0
|
||||
54165 => 0
|
||||
53917 => 0
|
||||
53916 => 0
|
||||
53915 => 0
|
||||
53914 => 0
|
||||
53913 => 0
|
||||
53912 => 0
|
||||
53911 => 0
|
||||
53910 => 0
|
||||
53909 => 0
|
||||
|
||||
--TileList_IndustryAccepting--
|
||||
Count(): 47
|
||||
@@ -9264,7 +9074,7 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
--Accounting--
|
||||
GetCosts(): -5947
|
||||
Should be: -5947
|
||||
GetName(): Road Vehicle #1
|
||||
GetName(): Road Vehicle 1
|
||||
SetName(): true
|
||||
GetName(): MyVehicleName
|
||||
CloneVehicle(): 13
|
||||
@@ -9272,12 +9082,12 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetLocation(): 33417
|
||||
GetEngineType(): 153
|
||||
GetUnitNumber(): 1
|
||||
GetAge(): 1
|
||||
GetAge(): 0
|
||||
GetMaxAge(): 5490
|
||||
GetAgeLeft(): 5489
|
||||
GetAgeLeft(): 5490
|
||||
GetCurrentSpeed(): 7
|
||||
GetRunningCost(): 421
|
||||
GetProfitThisYear(): -1
|
||||
GetProfitThisYear(): 0
|
||||
GetProfitLastYear(): 0
|
||||
GetCurrentValue(): 5947
|
||||
GetVehicleType(): 1
|
||||
@@ -9287,7 +9097,7 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
IsInDepot(): false
|
||||
GetNumWagons(): 1
|
||||
GetWagonEngineType(): 153
|
||||
GetWagonAge(): 1
|
||||
GetWagonAge(): 0
|
||||
GetLength(): 8
|
||||
GetOwner(): 1
|
||||
BuildVehicle(): 14
|
||||
@@ -9312,9 +9122,9 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
GetNumWagons(): 3
|
||||
GetLength(): 24
|
||||
GetWagonEngineType(): 9
|
||||
GetWagonAge(): 0
|
||||
GetWagonAge(): 1
|
||||
GetWagonEngineType(): 27
|
||||
GetWagonAge(): 0
|
||||
GetWagonAge(): 1
|
||||
GetWagonEngineType(): 27
|
||||
GetWagonAge(): 0
|
||||
GetWagonEngineType(): 65535
|
||||
@@ -9360,11 +9170,11 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
14 => 1
|
||||
12 => 1
|
||||
Age ListDump:
|
||||
17 => 1
|
||||
16 => 1
|
||||
14 => 1
|
||||
13 => 1
|
||||
12 => 1
|
||||
17 => 0
|
||||
16 => 0
|
||||
14 => 0
|
||||
MaxAge ListDump:
|
||||
16 => 10980
|
||||
14 => 10980
|
||||
@@ -9372,9 +9182,9 @@ ERROR: IsEnd() is invalid as Begin() is never called
|
||||
13 => 5490
|
||||
12 => 5490
|
||||
AgeLeft ListDump:
|
||||
16 => 10980
|
||||
14 => 10980
|
||||
17 => 7320
|
||||
16 => 10979
|
||||
14 => 10979
|
||||
17 => 7319
|
||||
13 => 5489
|
||||
12 => 5489
|
||||
CurrentSpeed ListDump:
|
@@ -1,9 +1,9 @@
class StationList extends AIController {
class Regression extends AIController {
	function Start();
};


function StationList::StationList()
function Regression::StationList()
{
	local list = AIStationList(AIStation.STATION_BUS_STOP + AIStation.STATION_TRUCK_STOP);

@@ -27,7 +27,7 @@ function StationList::StationList()
}
};

function StationList::StationList_Cargo()
function Regression::StationList_Cargo()
{
	print("");
	print("--StationList_Cargo--");
@@ -44,7 +44,7 @@ function StationList::StationList_Cargo()
}
};

function StationList::StationList_CargoPlanned()
function Regression::StationList_CargoPlanned()
{
	print("");
	print("--StationList_CargoPlanned--");
@@ -58,7 +58,7 @@ function StationList::StationList_CargoPlanned()
}
};

function StationList::StationList_CargoPlannedByFrom()
function Regression::StationList_CargoPlannedByFrom()
{
	print("");
	print("--StationList_CargoPlannedByFrom--");
@@ -68,7 +68,7 @@ function StationList::StationList_CargoPlannedByFrom()
}
};

function StationList::StationList_CargoPlannedByVia()
function Regression::StationList_CargoPlannedByVia()
{
	print("");
	print("--StationList_CargoPlannedByVia--");
@@ -78,7 +78,7 @@ function StationList::StationList_CargoPlannedByVia()
}
};

function StationList::StationList_CargoPlannedViaByFrom()
function Regression::StationList_CargoPlannedViaByFrom()
{
	print("");
	print("--StationList_CargoPlannedViaByFrom--");
@@ -88,7 +88,7 @@ function StationList::StationList_CargoPlannedViaByFrom()
}
};

function StationList::StationList_CargoPlannedFromByVia()
function Regression::StationList_CargoPlannedFromByVia()
{
	print("");
	print("--StationList_CargoPlannedFromByVia--");
@@ -98,7 +98,7 @@ function StationList::StationList_CargoPlannedFromByVia()
}
};

function StationList::StationList_CargoWaiting()
function Regression::StationList_CargoWaiting()
{
	print("");
	print("--StationList_CargoWaiting--");
@@ -112,7 +112,7 @@ function StationList::StationList_CargoWaiting()
}
};

function StationList::StationList_CargoWaitingByFrom()
function Regression::StationList_CargoWaitingByFrom()
{
	print("");
	print("--StationList_CargoWaitingByFrom--");
@@ -122,7 +122,7 @@ function StationList::StationList_CargoWaitingByFrom()
}
};

function StationList::StationList_CargoWaitingByVia()
function Regression::StationList_CargoWaitingByVia()
{
	print("");
	print("--StationList_CargoWaitingByVia--");
@@ -132,7 +132,7 @@ function StationList::StationList_CargoWaitingByVia()
}
};

function StationList::StationList_CargoWaitingViaByFrom()
function Regression::StationList_CargoWaitingViaByFrom()
{
	print("");
	print("--StationList_CargoWaitingViaByFrom--");
@@ -142,7 +142,7 @@ function StationList::StationList_CargoWaitingViaByFrom()
}
};

function StationList::StationList_CargoWaitingFromByVia()
function Regression::StationList_CargoWaitingFromByVia()
{
	print("");
	print("--StationList_CargoWaitingFromByVia--");
@@ -152,7 +152,7 @@ function StationList::StationList_CargoWaitingFromByVia()
}
};

function StationList::StationList_Vehicle()
function Regression::StationList_Vehicle()
{
	local list = AIStationList_Vehicle(12);

@@ -196,7 +196,7 @@ function StationList::StationList_Vehicle()
}
}

function StationList::Start()
function Regression::Start()
{
	StationList();
	StationList_Cargo();
BIN
bin/ai/regression/tst_stationlist/test.sav
Normal file
Binary file not shown.
BIN
bin/baseset/opntitle.dat
Normal file
Binary file not shown.
@@ -1,37 +0,0 @@
set(GS_COMPAT_SOURCE_FILES
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.2.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.3.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.4.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.5.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.6.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.7.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.8.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.9.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.10.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_1.11.nut
    ${CMAKE_CURRENT_SOURCE_DIR}/compat_12.nut
)

foreach(GS_COMPAT_SOURCE_FILE IN LISTS GS_COMPAT_SOURCE_FILES)
    string(REPLACE "${CMAKE_SOURCE_DIR}/bin/" "" GS_COMPAT_SOURCE_FILE_NAME "${GS_COMPAT_SOURCE_FILE}")
    string(CONCAT GS_COMPAT_BINARY_FILE "${CMAKE_BINARY_DIR}/" "${GS_COMPAT_SOURCE_FILE_NAME}")

    add_custom_command(OUTPUT ${GS_COMPAT_BINARY_FILE}
            COMMAND ${CMAKE_COMMAND} -E copy
                    ${GS_COMPAT_SOURCE_FILE}
                    ${GS_COMPAT_BINARY_FILE}
            MAIN_DEPENDENCY ${GS_COMPAT_SOURCE_FILE}
            COMMENT "Copying ${GS_COMPAT_SOURCE_FILE_NAME}"
    )

    list(APPEND GS_COMPAT_BINARY_FILES ${GS_COMPAT_BINARY_FILE})
endforeach()

# Create a new target which copies all compat files
add_custom_target(gs_compat_files
    DEPENDS ${GS_COMPAT_BINARY_FILES}
)

add_dependencies(openttd
    gs_compat_files
)
@@ -4,12 +4,3 @@
 * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
 */

GSLog.Info("1.10 API compatibility in effect.");

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}
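The hunk above is the whole compatibility shim for GSCompany.ChangeBankBalance: the current four-argument function is stashed under the underscore-prefixed name and the pre-1.11 three-argument signature is layered on top of it, forwarding GSMap.TILE_INVALID for the tile parameter that 1.11 introduced. A minimal, hypothetical call site it keeps working (the constants used are standard GS API names, but this exact snippet is illustrative and not taken from the repository):

/* Illustrative only: a GameScript written against the 1.10 API keeps using the
 * three-argument form; the shim above forwards GSMap.TILE_INVALID for it. */
if (GSCompany.ChangeBankBalance(GSCompany.COMPANY_FIRST, 10000, GSCompany.EXPENSES_OTHER)) {
	GSLog.Info("Bank balance changed via the pre-1.11 signature.");
}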
@@ -1,8 +0,0 @@
/*
 * This file is part of OpenTTD.
 * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
 * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
 */

GSLog.Info("1.11 API compatibility in effect.");
@@ -28,10 +28,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -28,10 +28,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -20,11 +20,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -13,10 +13,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -13,10 +13,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -13,10 +13,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -13,10 +13,3 @@ GSBridge.GetName <- function(bridge_id)
{
	return GSBridge._GetName(bridge_id, GSVehicle.VT_RAIL);
}

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -6,10 +6,3 @@
 */

GSLog.Info("1.9 API compatibility in effect.");

/* 1.11 adds a tile parameter. */
GSCompany._ChangeBankBalance <- GSCompany.ChangeBankBalance;
GSCompany.ChangeBankBalance <- function(company, delta, expenses_type)
{
	return GSCompany._ChangeBankBalance(company, delta, expenses_type, GSMap.TILE_INVALID);
}

@@ -1,6 +0,0 @@
/*
 * This file is part of OpenTTD.
 * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
 * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
 */
448
changelog.txt
@@ -1,437 +1,3 @@
|
||||
12.0-RC1 (2021-09-25)
|
||||
------------------------------------------------------------------------
|
||||
Feature: Display icon/text whether vehicle is lost in vehicle (list) window (#9543)
|
||||
Feature: [MacOS] Add selected toolbar buttons to MacBook Pro Touch Bar (#9511)
|
||||
Feature: Button to open order window from vehicle shared orders window (#9325)
|
||||
Feature: Ctrl-Clicking shared order vehicle list opens order window (#9325)
|
||||
Feature: Multiple rotating views on title screen (#8980)
|
||||
Feature: Hide block signals in GUI by default (#8688)
|
||||
Add: [Script] Allow GameScripts to build neutral objects (#9568)
|
||||
Add: [Network] Allow sending chat messages via admin port (#9563)
|
||||
Add: [AI/GS] Missing water related functions and objects (#8390)
|
||||
Fix: Industry funding window did not update when changing funding method (#9572)
|
||||
Fix #9562: [NewGRF] Handle case of invalid Action2 with zero results (#9564)
|
||||
Fix: Incorrect error messages when placing water in scenario editor (#9560)
|
||||
Fix #9484: Update locale currencies settings config map (#9559)
|
||||
Fix: Prevent train reversing when entirely inside a train depot (#9557)
|
||||
Fix: [Network] Add back 'Spectate' option to company toolbar menu (#9556)
|
||||
Fix #9463: [Win32] Work around XAudio2 crashes (#9549)
|
||||
Fix #8603: Don't give focus to text filter when opening Object GUI (#9547)
|
||||
Fix #9241: Grove and forest tree brushes did not also create rainforest terrain (#9542)
|
||||
Fix: [Network] Several crashes in our network code (#9534, #9456)
|
||||
Fix #9527: Crash when trying to place multi-tile objects at map edge (#9529)
|
||||
Fix: [Network] SendCmdNames only sent one name per packet (#9528)
|
||||
Fix #9407: Desync when founding a town nearby a station (#9526)
|
||||
Fix #9521: Don't load at just removed docks that were part of a multi-dock station (#9524)
|
||||
Fix: Ships always tried to avoid docking tiles when pathfinding (even if nothing was on them) (#9522)
|
||||
Fix: [Network] Convert server_advertise to server_game_type in config file (#9515)
|
||||
Fix #9490: [Network] A full server couldn't be queried (#9508)
|
||||
Fix: [Network] Don't show GameScript " (v0)" for old servers (#9507)
|
||||
Fix: [Network] Show query errors in the server listing instead of error popup (#9506)
|
||||
Fix: [Network] Crash when last-joined server was no longer available (#9503)
|
||||
Fix #9501: [Network] Crash when more than one game-info query was pending (#9502)
|
||||
Fix: Wrong error message when building canals over ship depots / locks (#9410)
|
||||
Fix: Reduce cost of building canals over objects on sea (#9410)
|
||||
Change: [Linkgraph] Delete links only served by vehicles stopped in depot (#9499)
|
||||
|
||||
|
||||
12.0-beta2 (2021-08-19)
|
||||
------------------------------------------------------------------------
|
||||
Feature: [Linkgraph] Prioritize faster routes for passengers, mail and express cargo (#9457)
|
||||
Fix: Wrong town window refreshed when building an airport with noise levels enabled (#9497)
|
||||
Fix: Improve wording of network-related messages (#9494, #9495, #9500)
|
||||
Fix: [Network] Report reuse of invite-code (#9487)
|
||||
Fix: [Network] Connecting with the same client name thrice hangs the server (#9485)
|
||||
|
||||
|
||||
12.0-beta1 (2021-08-15)
|
||||
------------------------------------------------------------------------
|
||||
Feature: [Network] Remove lobby window; pressing "Join Game" now immediately joins a server (#9467)
|
||||
Feature: [Network] Synchronize server name to clients and display in Online Players window (#9472)
|
||||
Feature: [Network] Mention you are a spectator in the status bar (#9471)
|
||||
Feature: [Network] No longer require port-forwarding to host a server (#9443, #9447)
|
||||
Feature: [Network] Allow setting your server visibility to "invite-only" (#9434)
|
||||
Feature: [Network] Join servers based on their invite code (#9432)
|
||||
Feature: Raise the maximum NewGRF limit to 255 (#9428)
|
||||
Feature: Persistent rotation of numbered auto/netsave after restart (#9395, #9397)
|
||||
Feature: [NewGRF] Maximum curve speed modifier for rail vehicles (#9346)
|
||||
Feature: Move sensitive information to secrets.cfg and private information to private.cfg (#9298)
|
||||
Feature: Signed Windows builds (#9294)
|
||||
Feature: [NewGRF] Define refittability of default vehicles using cargo classes (#9148)
|
||||
Feature: Configurable subsidy duration, up to 5000 years (#9081)
|
||||
Feature: [Network] Rework in-game Online Players window (#9067)
|
||||
Feature: [Network] Show previous chat history when the chat message box is open (#9025)
|
||||
Feature: Button to reset game settings to their default values (#8958)
|
||||
Feature: Press Ctrl to build diagonal rivers in Scenario Editor (#8880)
|
||||
Feature: Set wagon replacement per group when using autoreplace (#7441)
|
||||
Add: [Network] Open Online Players window on starting/joining a server (#9479)
|
||||
Add: [Script] Basic information about loaded NewGRFs for scripts (#9464)
|
||||
Add: [AI] Get the number of vehicles in a given group (#9462)
|
||||
Add: [Network] Inform network clients what game-script a server is running (#9441)
|
||||
Add: Hindi translation (#9086)
|
||||
Add: [Network] Ensure players fill in a name instead of defaulting to "Player" (#9080)
|
||||
Change: Allow pause/unpause console command in single player (#9342)
|
||||
Change: Make savegame format self-descriptive and consistent across all objects (#9322, #9335, #9338, #9339)
|
||||
Change: By default, make "unload all" leave stations empty (#9301)
|
||||
Change: Reworked the debug levels and messages for network logs (#9230, #9251)
|
||||
Change: [Emscripten] Set default scrolling mode to non-pointer-locking (#9191)
|
||||
Change: Use neutral pronouns for various strings (#9189, #9203, #9228)
|
||||
Change: Make the town directory horizontally resizable (#9157)
|
||||
Change: Allow non-ASCII currency separators (#9121)
|
||||
Change: [NewGRF] Display a pop-up window for Errors with severity ERROR (#9119)
|
||||
Change: Treat languages as finished, if translations are 75% completed (#9019, #9086)
|
||||
Change: Disable NewGRF window apply button if no change was made (#8934)
|
||||
Fix: [Script] Crash when iterating lists of which the key is larger than 32bit (#9465)
|
||||
Fix: [Network] Desync due to use of unstable sort when distributing cargo production (#9460)
|
||||
Fix #9440: Negative cargo payments not being handled correctly (#9455)
|
||||
Fix: [Network] Crash when joining a server again after a TCP disconnect (#9453)
|
||||
Fix: Don't enable rename button for network clients in build vehicle window (#9452)
|
||||
Fix: Money could underflow and wrap around (#9451)
|
||||
Fix: Parse the console settings the same way as config settings (#9438)
|
||||
Fix: Ensure no more than the allowed number of NewGRFs are loaded from the configuration (#9430)
|
||||
Fix: [NewGRF] Overflow when determining cargo mask for string code 9A 1E (#9423)
|
||||
Fix: Integers for scripts are 64bit, but saved as 32bit (#9415)
|
||||
Fix #9392: [Script] Return a valid value with GetBuildWithRefitCapacity even when AIs are maxed out in vehicles (#9393)
|
||||
Fix #8169: Crash when autoreplacing vehicle with no orders (#9387)
|
||||
Fix: Wrong cargo line position in IndustryCargo window (#9383)
|
||||
Fix: Race-condition during startup of NewGRF scan (#9382)
|
||||
Fix: Don't propagate Shift/Ctrl state till next game-tick (#9381)
|
||||
Fix: Prevent palette updates during copying to the video driver (#9379)
|
||||
Fix: [Network] Determining GetNetworkRevisionString could overflow and underflow its buffer (#9372)
|
||||
Fix #9358: Don't skip empty files in tar archives (#9367)
|
||||
Fix: For old savegames, station bus/truck station cache was not updated (#9366)
|
||||
Fix #9353: [Script] Garbage collecting on priority queues could crash the game (#9356)
|
||||
Fix: Respect the autosave_on_exit setting for Null video driver (#9343)
|
||||
Fix: Compatible NewGRFs in crash-log reported wrong MD5 hash (#9340)
|
||||
Fix: [Script] Ensure the saved script strings are properly validated and terminated (#9336)
|
||||
Fix #9316: Town bridge length limit check incorrect above 250k inhabitants (#9318)
|
||||
Fix: Limit heightmap sizes to 8192x8192 (#9307)
|
||||
Fix #9281: Money generating exploit when buying out a company (#9300)
|
||||
Fix: Part of a tile might not be redrawn when terraforming (#9296)
|
||||
Fix: [OpenGL] Increase timeout when waiting for the GPU to be done with the drawing buffer (#9282)
|
||||
Fix: Vehicles sent in the wrong direction if there is no path to the destination (#9280)
|
||||
Fix #9264: Do not attach temporary wagons to free wagon chains when autoreplacing (#9278)
|
||||
Fix #9267: [Script] Crash during garbage collection (#9275)
|
||||
Fix: Encountering two-way red signals could prune unrelated Pathfinder branches (#9271)
|
||||
Fix #9255: [Network] Crash when hostname is not found (#9259)
|
||||
Fix #9256: Invalid read after free when replacing train chains (#9258)
|
||||
Fix: [Emscripten] Force secure WebSockets over HTTPS (#9248)
|
||||
Fix #9242: Tree tick handler did not scale by map size (#9246)
|
||||
Fix: [Network] Mark server as offline when no longer reachable (#9244)
|
||||
Fix: [Network] Don't rebuild the host-list during iterating the list (#9240)
|
||||
Fix: [Network] Don't mark the last-joined server as a manually added server (#9239)
|
||||
Fix: [Network] Clients leaving because of broken connections was not broadcasted (#9238)
|
||||
Fix: [Network] Check on CIDR for netmask check considered everything valid (#9235)
|
||||
Fix: Creating screenshots on dedicated servers failed (#9232)
|
||||
Fix: Leaking file descriptors for downloaded content (#9229)
|
||||
Fix: Spelling of several town names (#9222)
|
||||
Fix #9209: Game hangs when resizing highscore/news window if the screen is too small (#9210)
|
||||
Fix: [Network] Optimize creating network connections for clients using IPv4 and IPv6 (#9199)
|
||||
Fix #9186: Fix incorrect bounding box height causing station sprite glitch (#9187)
|
||||
Fix: Truncating strings in settings could leave invalid UTF-8 characters (#9121)
|
||||
Fix: Many issues related to window scaling (#9087, #9219)
|
||||
Fix: Invalidate cached vehicle colourmaps when changing liveries setting (#9006)
|
||||
Fix #8981: Don't attempt to re-reserve path if already entering/entered depot (#9000)
|
||||
Fix: Missing 'Town names:' colon in map gen GUI (#8986)
|
||||
Fix: Sorting and filtering industries that produce/accept many cargoes (#8468)
|
||||
Remove: [Network] COMPANY_INFO packets (#9475)
|
||||
Remove: [Network] A server can no longer set a limit to the amount of spectators allowed (#9466)
|
||||
Remove: Arbitrary limit on number of statically loaded NewGRFs (#9431)
|
||||
Remove: [Network] Language and map-name from server information (#9070)
|
||||
|
||||
|
||||
1.11.2 (2021-05-03)
|
||||
------------------------------------------------------------------------
|
||||
Change: [Win32] Limit hardware accelerated video driver to OpenGL 3.2 or higher (#9077)
|
||||
Change: More improvements to the GUI at different scales (#9075, #9102, #9107, #9133, #9174, #9183)
|
||||
Fix: Query windows could be partially drawn (#9184)
|
||||
Fix #9113: Crash when removing an airport that exists in an aircraft's orders (#9182)
|
||||
Fix #9117: [Fluidsynth] Hang when changing song (#9181)
|
||||
Fix: String validation could leave invalid UTF-8 encoded strings (#9096)
|
||||
Fix: [Network] Out-of-bounds memory access with modified servers sending too short password salts (#9176)
|
||||
Fix: Crash when extra viewport with zero height has sign in view (#9175)
|
||||
Fix #9147: Crash when taking screenshots (#9169)
|
||||
Fix #6598: [Network] Prevent crashes when (re)joining network game by falling back to main menu first (#9163)
|
||||
Fix #9152: Screenshot success popup window was treated as an error (#9159)
|
||||
Fix: Fast-forward stuttering when vsync is enabled (#9140)
|
||||
Fix: [Network, Win32] Network errors were handled badly (#9116)
|
||||
Fix: [Network] Savegame transfer could stall in rare cases (#9106)
|
||||
Fix #9097: [NewGRF] Cargo initial payment variable was being truncated (#9098)
|
||||
Fix: [NewGRF] Industry variable 66 and object variable 46 erroneously truncated the distance (#9088)
|
||||
Fix: [NewGRF] Industry variables 65 and 66 ignored the parameter, and always used the north tile (#9088)
|
||||
Fix: Do not include regression test AI in bundle (#9068, #9164)
|
||||
Fix #9062: [Win32] Version in executable was not set to current release version (#9066, #9154)
|
||||
|
||||
|
||||
1.11.1 (2021-04-18)
|
||||
------------------------------------------------------------------------
|
||||
Feature: Toggle to enable/disable vsync (#8997)
|
||||
Feature: Volume controls in the Game Options window, and better defaults (#8943)
|
||||
Add: Hotkey to focus object and rail filters (#8908)
|
||||
Add: Better plural support for Romanian (#8936)
|
||||
Change: Improve layout and spacing of several windows at different GUI scales (#9041, #9042, #9044, #9050)
|
||||
Change: [Win32] Use user UI language setting for initial language selection (#8974)
|
||||
Change: Make effect volume scale more intuitively (#8945, #8950)
|
||||
Change: Improve padding of Object & Rail station windows (#8929)
|
||||
Fix #6322: [Script] Crash when script allocates too much memory, now kills script instead (#9047)
|
||||
Fix #7513: [Script] Crash on garbage collection with misbehaving script (#9040)
|
||||
Fix #9028: [OpenGL] Crash when changing max sprite zoom level (#9032)
|
||||
Fix #8874: show a warning when a NewGRF scan is requested multiple times (#9022)
|
||||
Fix: Desync when GS unlocks railtype with wagon unlock (#9021)
|
||||
Fix #9015: [Win32] Crash on running "pwd" command in the console (#9016)
|
||||
Fix #9008: Validate starting year given on the command line (-t) (#9014)
|
||||
Fix #8878: [Network] Slow DNS queries could block the server and disconnect clients (#9013)
|
||||
Fix: Improve validation of OpenGL video driver to avoid crashes (#9007)
|
||||
Fix: Credits scrolled too slowly with larger font sizes (#8994)
|
||||
Fix #8977: Crash when altering max sprite resolution (#8993)
|
||||
Fix #8956: Industry disaster news messages showed the wrong location (#8992)
|
||||
Fix: [Win32] Font glyphs of certain widths had broken rendering (#8990)
|
||||
Fix #8930: [Win32] Duplicate text input issue for systems using IME (#8976)
|
||||
Fix: [Network] Potential stale client entries in client list (#8959)
|
||||
Fix: Graphical issues when dragging measurement tooltips (#8951)
|
||||
Fix: [Fluidsynth] Use provided default soundfont if available (#8948, #8953)
|
||||
Fix #8935: [macOS] Crash on save (#8944)
|
||||
Fix #8922: Crash when selling shared vehicles with shared vehicle window open (#8926)
|
||||
Fix: Compiling on armhf (Raspberry Pi) (#8924)
|
||||
|
||||
|
||||
1.11.0 (2021-04-01)
|
||||
------------------------------------------------------------------------
|
||||
Feature: Allow setting a custom terrain type to define highest peak (#8891)
|
||||
Feature: Auto-detect map height limit based on generated map (#8891)
|
||||
Feature: Setting to indicate desert coverage for tropic climate and snow coverage for arctic climate (replaces snow line height) (#8891)
|
||||
Add: Allow setting the highest mountain for heightmaps (#8891)
|
||||
Change: Scale exported heightmaps to highest peak and inform the user of this value (#8891)
|
||||
Change: Remove "maximum map height" from the New Game GUI (#8891)
|
||||
Fix #8803: Only auto-remove signals when rail can be built (#8904)
|
||||
Fix #8565: Stopped road vehicle displays a speed different than 0 (#8901)
|
||||
Fix #8886: Don't try to resolve folders within tars named '.' (#8893)
|
||||
Fix: Placing random trees in SE crashes the game (#8892)
|
||||
Fix #8875: Filter string in station window breaks flow in user interface (#8885)
|
||||
Fix #8871: [OpenGL] Initialize all buffers after resize and clear back buffer (#8877)
|
||||
Fix: OpenGL performance with some AMD GPUs (#8876)
|
||||
Fix: Recompute road/railtype availability after disabling the engine (#8872)
|
||||
Fix: OSK layout not scaled for 2x or 4x GUI scale (#8868)
|
||||
|
||||
|
||||
1.11.0-RC1 (2021-03-14)
|
||||
------------------------------------------------------------------------
|
||||
Feature: Option to (dis-)allow hardware accelerated video drivers (#8819)
|
||||
Feature: Option to set display refresh rate (#8813)
|
||||
Feature: Allow custom width/height of screenshot and making heightmap screenshots via console (#8804)
|
||||
Feature: Allow filtering on name in rail station window (#8706)
|
||||
Feature: Setting for highest resolution of sprites to use (#8604)
|
||||
Add: Make NewGRF Scanner / World Generation update smoother and make aborting it react faster (#8830)
|
||||
Add: Malaysia Ringgit as Currency (#8783)
|
||||
Add: "Engines only" filter in build train window (#8733)
|
||||
Change: De-limit framerate window's framerate (#8772)
|
||||
Change: Clarify what effect town interactions have (#8744)
|
||||
Change: Don't show global goals in company goal windows (#8709)
|
||||
Change: Recolour graph windows to brown (#8700)
|
||||
Fix #8855: Bootstrap could result in an empty screen when bootstrap fails (#8856)
|
||||
Fix #8851: Don't allow infinite "exec" depth in script, but limit to 10 deep (#8852)
|
||||
Fix #8647: Incorrect drawing order of tram catenary sprites (#8843)
|
||||
Fix #8711: Having gui_zoom lower than zoom_min causes a crash (#8835)
|
||||
Fix #8810: "aircraft out of fuel" news shows the wrong place (#8832)
|
||||
Fix #8833: Don't reload NewGRFs when we are shutting down (#8830)
|
||||
Fix: Scale padding between elements the same as other padding (#8829)
|
||||
Fix #8808: [OSX, OpenGL] Crash on switching blitters due to double-mapping the video buffer (#8822)
|
||||
Fix #8784: Using Alt+Enter doesn't update the fullscreen toggle visibly (#8820)
|
||||
Fix #8817: Keep NewGRF order for object class sorting (#8818)
|
||||
Fix #8809: Crash when removing airport when hangar window open (#8815)
|
||||
Fix #8799: Crash when Search Internet in Multiplayer (#8801)
|
||||
Fix #8775: [Win32] Don't create the main window when Alt-Tabbing back into fullscreen (#8792)
|
||||
Fix #8774: Black screenshots when using 40bpp-blitter (#8791)
|
||||
Fix: [OSX] Hide dock when entering fullscreen (#8789)
|
||||
Fix: Bootstrap fails to start on clean install (#8788)
|
||||
Fix: Terraform limit acts random when maxing out per_64k_frames setting (#8782)
|
||||
Fix: Max-value of fast-forward-speed-limit can be outside its storage size (#8769)
|
||||
|
||||
|
||||
1.11.0-beta2 (2021-02-28)
|
||||
------------------------------------------------------------------------
|
||||
Feature: Add setting to limit fast-forward speed (#8766)
|
||||
Feature: Significant performance improvements to all video drivers (#8605, #8652, #8660, #8685, #8702, #8703, #8707, #8726, #8740)
|
||||
Feature: Configurable display refresh-rate, default to 60fps (#8680)
|
||||
Feature: Automatically upload releases to Steam (#8644)
|
||||
Feature: Generic Linux builds (#8641)
|
||||
Feature: [GS] Allow non-question type windows to have no buttons (#8638)
|
||||
Feature: [macOS] ZIP build (#8614)
|
||||
Feature: Object class selection string filtering (#8603)
|
||||
Feature: 'Remove all industries' button in scenario editor (#8550)
|
||||
Feature: Automatic UI and font zoom levels when supported by the OS (#8537)
|
||||
Feature: [macOS] Render screen at native resolution by default for HiDPI screens (#8519)
|
||||
Feature: OpenGL video driver (#7744)
|
||||
Add: Indonesia Rupiah currency (#8616)
|
||||
Change: Improve graph period markings (#8732)
|
||||
Change: Make pathfinder account for maximum order speed, if set (#8722)
|
||||
Change: Darken graph grid lines for legibility (#8690)
|
||||
Change: Make order window hotkeys toggle for load & unload variants (#8669)
|
||||
Change: Use a more specific error message when attempting to bulldoze your own HQ (#8667)
|
||||
Change: Convert .md to .rtf for Windows/Mac packages (#8617)
|
||||
Change: Move the 'tree placer algorithm' & 'road drive side' settings to the Settings window (#8566)
|
||||
Change: Move town name generator selection to mapgen GUI (#8566)
|
||||
Change: [macOS] Native font rendering (#8518)
|
||||
Fix: Display of network lobby windows for different GUI sizes (#8765)
|
||||
Fix: Don't desync if client leaves before you finish downloading map (#8755)
|
||||
Fix: Allow estimating vehicle clone cost even if short on money (#8748)
|
||||
Fix: Don't notify twice that a client left because of a timeout (#8746)
|
||||
Fix: Vehicle cursor size did not account for the interface zoom level (#8739)
|
||||
Fix #8123: Trams on half-tiles couldn't find depots (#8738)
|
||||
Fix #8276: [NewGRF] Crash when an object's size was not set (#8719)
|
||||
Fix #8349: Close depot vehicle list windows when closing the depot window (#8717)
|
||||
Fix #8594: [NRT] Road pathfinder did not account for roadtype speed limits or lengths of tunnels/bridges (#8710)
|
||||
Fix: Whole status bar instead of money widget refreshed on money change (#8692)
|
||||
Fix: Unnecessary status bar redraws when there is no news to show (#8691)
|
||||
Fix: New orders are non-stop by default (#8689)
|
||||
Fix: Framerate window showed a slightly higher rate than actually measured (#8682)
|
||||
Fix: Autorenew failure advice due to bad refit being shown to all companies (#8681)
|
||||
Fix #8625: Wrong ending year was displayed in highscore table (#8672)
|
||||
Fix #8620: Scale spacing between date & news in history window according to font scaling (#8671)
|
||||
Fix: [Win32] Set minimum resolution for timers to 1ms (#8660)
|
||||
Fix: Mention our websites with https:// (instead of http://) (#8657)
|
||||
Fix: [Emscripten] Open links in browser (#8655)
|
||||
Fix: Don't crash when towns upgrade road tiles during expansion (#8651)
|
||||
Fix #8029: [SDL2] Blank display when under Wayland (#8648)
|
||||
Fix: Default Network Server List sorter put compatible servers in wrong order (#8626)
|
||||
Fix: Use non-pulsating red highlight for coverage (#8622)
|
||||
Fix: Center text and image in vehicle statusbar vertically (#8602)
|
||||
Fix: Don't walk out of the map when trying to build tunnels (#8600)
|
||||
Fix: Off-by-one error in desert/rainforest positioning at world gen (#8588)
|
||||
Fix #8037: Crash when restarting AI that is controlling the same company as the player (#8587)
|
||||
Fix: Stopped ships shouldn't block depots (#8578)
|
||||
|
||||
|
||||
1.11.0-beta1 (2021-01-22)
|
||||
------------------------------------------------------------------------
|
||||
Feature: [GS] Ability to set some extra text in the industry window (#8576)
|
||||
Feature: Show rainforest under vegetation on smallmap (#8562)
|
||||
Feature: Automatically determine window size on new install (#8536)
|
||||
Feature: Towns can build tunnels (#8473)
|
||||
Feature: Make maximum length of town bridges depend on population (with a minimum limit of 4) (#8439)
|
||||
Feature: New icons for renaming and go-to-location on GUI windows, and improve consistency of usage (#8455)
|
||||
Feature: Support for ARM64 on Apple Silicon and Windows (#8340, #8577, #8583)
|
||||
Feature: Add an option to disable tree growth completely (#8415)
|
||||
Feature: Support for Emscripten (play-OpenTTD-in-the-browser!) (#8355)
|
||||
Feature: Show group name as part of the default vehicle name (#8307)
|
||||
Feature: "Frozen" economy setting that stops production changes and industry closures (#8282)
|
||||
Feature: New velocity unit "tiles/day" (#8278)
|
||||
Feature: Option to automatically remove signals when placing rail (#8274)
|
||||
Feature: Increase max possible distance from border for oil refineries and rigs (#8237)
|
||||
Feature: Improve tree planting window, and allow planting 'clumps' of trees by dragging in the scenario editor (#8234)
|
||||
Feature: Indian Rupee (INR) currency (#8136)
|
||||
Feature: [GS] Ability to give a company exclusive access to an industry (#8115)
|
||||
Feature: Hotkeys for Land Info window, News window & close error window (#8053, #8266)
|
||||
Feature: Improve rendering of large viewports (#7962)
|
||||
Feature: [GS] Influence industry production changes from GameScript (#7912)
|
||||
Feature: [GS] Push-buttons on storybook pages (#7896)
|
||||
Feature: Option to group vehicle lists by shared orders (#7028)
|
||||
Feature: Drag-and-drop vehicles in group GUI for shared order groups (#7028)
|
||||
Add: [GS] A tile parameter to GSCompany::ChangeBankBalance for showing changes more visually (#8573)
|
||||
Add: [NewGRF] Allow NewGRF vehicles to query the current rail/road/tram type (#8554)
|
||||
Add: [Script] ScriptCargo::GetName for the human readable name of cargoes (#8544)
|
||||
Add: "reload" console command to reload the current scenario or heightmap (#8527)
|
||||
Add: [NewGRF] Flag to test if inflation is on or off (#8427)
|
||||
Add: [Script] Native priority queue (useful for things like pathfinders) (#8091)
|
||||
Add: [NewGRF] Industry behaviour flag to override second cargo production clamping for water industries when using smooth economy (#8079)
|
||||
Change: [SDL2] Start game on the screen where the cursor is (#8572)
|
||||
Change: Use a dark background for all profit graphs to increase contrast (#8557)
|
||||
Change: Reword warning in cheat window (#8538)
|
||||
Change: Enable the toolbar for road/rail/dock/airport, regardless of vehicle availability (#8521)
|
||||
Change: For arctic and tropical climates, make sure at least a few hills are generated (#8513)
|
||||
Change: Destroying a tunnel/bridge now sells the tracks before destroying the tunnel/bridge (#8508)
|
||||
Change: Move "give money" from client-list to company window (#8500)
|
||||
Change: [MacOS] Hide Dock and menu when in fullscreen mode (#8487)
|
||||
Change: Improve performance for complex vehicle chains by resolving sprites less frequently (#8485)
|
||||
Change: Make engine reliability independent of introduction date (#8470)
|
||||
Change: Some default settings to improve gameplay for new players - default non-stop orders on, disable inflation, quick goto orders, show track reservations, and more (#8463)
|
||||
Change: Converting town-owned road types now requires a positive town rating (#8457)
|
||||
Change: Rework server list buttons for searching LAN/internet servers (#8426)
|
||||
Change: Add some styling to GS question windows depending on the type (#8422)
|
||||
Change: [Linkgraph] Speed up game exit by allowing job threads to be aborted early (#8416)
|
||||
Change: Prevent towns from building dead-end road bridges (#8401)
|
||||
Change: Send network error to the server before making an emergency save (#8387)
|
||||
Change: Extend the allowed range for max loan setting up to £2 billion (#8386)
|
||||
Change: Don't display OS name when exiting the game (#8366)
|
||||
Change: Save openttd.cfg immediately on changing a setting (#8358)
|
||||
Change: Autorenew now defaults to on (#8352)
|
||||
Change: [NewGRF] Also use aircraft property 12 for helicopters (#8347)
|
||||
Change: Service at depot also resets breakdown chance (#8317)
|
||||
Change: Use key names instead of characters in hotkey.cfg (#8291)
|
||||
Change: Allow command cost-estimation while paused (#8222)
|
||||
Change: Always apply inflation from 1920 to 2090, no matter the game start year (#7589)
|
||||
Change: Use CMake for build system (#7270)
|
||||
Change: [Linkgraph] Pause the game when linkgraph jobs lag (#7081)
|
||||
Change: Place "Group by" above "Sort by" in station window for consistency (#7028)
|
||||
Fix #8589: Prevent desyncs with vehicle motion counters and NewGRFs (#8591)
|
||||
Fix #7670: Improve pathfinder performance when lost vehicles are blocked from moving (#8568)
|
||||
Fix: Inform user if a custom font failed to load due to missing glyphs (#8559)
|
||||
Fix: Don't allow wagon chains (without an engine) to exceed maximum train length (#8533)
|
||||
Fix #7619: Super fast NewGRF aircraft could be unable to land (#8531)
|
||||
Fix: Improve connection retries for the content server in cases of broken networking (#8530)
|
||||
Fix #7972: Show invalid orders to stations that don't accept the vehicle (#8516)
|
||||
Fix: Error when trying to clone a vehicle with invalid orders (#8515)
|
||||
Fix #8050: Various off-by-one errors in how the end-year of the game was used (#8512)
|
||||
Fix #8332: Aborting vehicle group drag & drop could cause crashes (#8511)
|
||||
Fix #8168: Allow relocating HQ partially over an existing HQ (#8510)
|
||||
Fix #8068: Allow selling tram track regardless of bank balance (#8509)
|
||||
Fix #7604: Prevent houses from wandering away from roads (#8507)
|
||||
Fix: Make the "password" button the same size as the other buttons in the Company window (#8500)
|
||||
Fix #7611: Keep news about vehicle accidents around after the vehicle is cleaned up (#8497)
|
||||
Fix: [MacOS] Full animation in fullscreen mode was reducing the height of the window (#8491)
|
||||
Fix: [MacOS] Loading custom fonts (#8484)
|
||||
Fix: Network client makes emergency saves twice if the server is disconnected (#8477)
|
||||
Fix #8462: Stop towns from trying to build roads on water (#8471)
|
||||
Fix: [NewGRF] GetCurveSpeedLimit should use the railtype from the current tile (#8466)
|
||||
Fix #8437: Crash when using certain heliports with certain rotated airports (#8458)
|
||||
Fix #8437: Planes would land at the wrong height if the top corner of the airport was lowered (#8458)
|
||||
Fix #8297: Infrastructure counters for road tunnels, bridges & depots (#8454)
|
||||
Fix #6468: Don't store the version of AIs that are started via console (#8430)
|
||||
Fix: Don't lower tree density if spreading is not enabled (#8413)
|
||||
Fix: Prevent savegame version conflicts with certain old patchpacks (#8411)
|
||||
Fix: [NewGRF] Variable 0x44 was always HZB_TOWN_EDGE for road stops (#8400)
|
||||
Fix #8313: Use correct capitalization for TTO / DOS music files in the baseset metadata (#8385)
|
||||
Fix: [NewGRF] Action 7/9 conditions 0x0F to 0x12 failed, if 'param' was 0x88 (#8382)
|
||||
Fix: Change the working-dir searchpath when using '-c' (#8367)
|
||||
Fix: Useless warning with -snull and no BaseSounds available (#8361)
|
||||
Fix: Crash trying to load TTO/TTD savegames. (#8356)
|
||||
Fix: [Script] Don't echo script exceptions to console (#8331)
|
||||
Fix: Slovak ownname was using the wrong form (#8326)
|
||||
Fix #8311: [NewGRF] Industry probability at map generation was scaled differently when set via property or callback (#8312)
|
||||
Fix: Only check houses for cargo when generating subsidies with towns (#8305)
|
||||
Fix: Sprite preview in sprite aligner was too small with scaled UI (#8288)
|
||||
Fix: Spell 'Viewport' consistently (#8260)
|
||||
Fix #7772: Show vehicle destination on mouseover when vehicle stopped (#8236, #8543)
|
||||
Fix #8232: Huge screenshot warning was shown incorrectly (#8224)
|
||||
Fix #8153: Report incompatible cargo/order when autoreplace fails (#8169)
|
||||
Fix: [Script] ScriptMarine::AreWaterTilesConnected did not work for aqueducts (#8074)
|
||||
Fix #7645: Add cost of clearing the sloped tile to the price of a dock (#7947)
|
||||
Fix #6452: Reset only editable and visible settings from GUI (#7890)
|
||||
Fix: Original terrain generator did not keep a single gap of water at the borders (#7883)
|
||||
Remove: In-game console command "content select all" (#8363)
|
||||
Remove: [OSX] Support for OSX older than 10.7, including QuickTime music driver (#8078)
|
||||
|
||||
|
||||
1.10.3 (2020-08-09)
|
||||
------------------------------------------------------------------------
|
||||
Change: Also make roadside trees match the tree transparency option (#8245)
|
||||
Fix: Center text and icons in the status bar vertically (#8273)
|
||||
Fix: [NRT] Set invalid road and tram types for rail tunnel ends (#8269)
|
||||
Fix #7980: Properly invalidate mouse-over station coverage highlight (#8263)
|
||||
Fix #8250: [NRT] Company infrastructure window always omits last road/tramtype (#8251)
|
||||
Fix #8162: [NRT] Improve error message when converting town owned road (#8247)
|
||||
Fix #8216: Don't show floating text on autoreplace if cost is 0 (#8244)
|
||||
Fix #8129: Crash if a news message expires while viewing the endgame screen (#8243)
|
||||
Fix #8221: Use more specific error message when a bridge is too long (#8240)
|
||||
Fix #8230: Resolve ".." when opening files in .tar (#8231)
|
||||
Fix: A few race conditions in netcode (#8227, #8228, #8229)
|
||||
Fix #7838: Crash relating to group creation and renaming (#8223)
|
||||
Fix #8104: [SDL2] Fix window resizability when going from fullscreen to windowed mode (#8211)
|
||||
Fix: Display banlist's indexes correctly (#8209)
|
||||
Fix: Possible desync with subsidy creation (#8159)
|
||||
Fix #8131: Draw small bridges pillars in more places (#8149)
|
||||
|
||||
|
||||
1.10.2 (2020-06-01)
|
||||
------------------------------------------------------------------------
|
||||
Add: Ubuntu 20.04 packages (#8127)
|
||||
@@ -2436,7 +2002,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: [NewGRF] Additional text in fund industry window is NewGRF supplied and thus should have a default colour (r22631)
|
||||
- Fix: Also initialise _old_vds with newgame settings; TTD savegames do not contain these settings [FS#4622] (r22626)
|
||||
- Fix: Do not zero the orders of disaster vehicles when converting savegames [FS#4642] (r22625)
|
||||
- Fix: When closing an AI company the local player cheated to, we need to cheat them to another company [FS#4654] (r22624, r22623)
|
||||
- Fix: When closing an AI company the local player cheated to, we need to cheat him to another company [FS#4654] (r22624, r22623)
|
||||
- Fix: When closing down companies their shares in other companies must be sold even if share trading is disabled at that point of time (r22622)
|
||||
- Fix: When asking the user to confirm an unsafe unpausing, there is no need to execute a command if 'no' is chosen. This also prevents crashing when clicking unpause while the confirm window is shown (r22621)
|
||||
- Fix: Enforce refit orders to be 'always go to depot' orders; service-only and stop-in-depot orders make no sense with refitting [FS#4651] (r22620)
|
||||
@@ -3158,7 +2724,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: Chat message caused glitch when rejoining a network game [FS#3757] (r19629)
|
||||
- Fix: Desync when a command is received and in the queue while a client starts joining, i.e. save the game state. This can happen in two ways: with frame_freq > 1 a command received in a previous frame might not be executed yet or when a command is received in the same frame as the join but before the savegame is made. In both cases the joining client would not get all commands to get in-sync with the server (and the other clients) (r19620)
|
||||
- Fix: Company related graphs were not updated correctly after changing the company colour [FS#3763] (r19615)
|
||||
- Fix: Possible invalid read when server moves client to spectators before they finish joining [FS#3755] (r19613)
|
||||
- Fix: Possible invalid read when server moves client to spectators before he finishes joining [FS#3755] (r19613)
|
||||
- Fix: Crash when opening a savegame with a waypoint from around 0.4.0 [FS#3756] (r19612)
|
||||
- Fix: Improve joining behaviour; kicking clients when entering passwords that was just cleared, 'connection lost' for people failing the password, access restriction circumvention [CVE-2010-0401] [FS#3754] (r19610, r19609, r19608, r19607, r19606)
|
||||
- Fix: Desync debugging; false positives in the cache validity checks and saving/loading the command stream (r19619, r19617, r19602, r19601, r19600, r19596, r19593, r19592, r19589, r19587, r19586)
|
||||
@@ -3513,7 +3079,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: Do not account for path reservation costs when entering a signal block via a 'block' signal. This way you will not get double penalties, both red signals and reservation costs, for the block signalled tracks [FS#2722] (r18535)
|
||||
- Fix: [NewGRF] An industry NewGRF that defined a too small size for action0 prop 0A could cause a crash (r18527)
|
||||
- Fix: Allegro does not like to work with extmidi, so warn the user about that [FS#3272] (r18520)
|
||||
- Fix: When you pass a signal at danger, in a PBS controlled area, do not try to do the 'safe' thing and stop, but continue going; the user wanted the train to pass the signal at danger so they have to suffer the consequences. Of course one can always stop the train manually [FS#2891] (r18515)
|
||||
- Fix: When you pass a signal at danger, in a PBS controlled area, do not try to do the 'safe' thing and stop, but continue going; the user wanted the train to pass the signal at danger so (s)he has to suffer the consequences. Of course one can always stop the train manually [FS#2891] (r18515)
|
||||
- Fix: No error message was created for the first fatal NewGRF error [FS#3368] (r18506)
|
||||
- Fix: Improve airport movement on several airports [FS#3169] (r18505)
|
||||
- Fix: Autoreplace and autorenew always reset their cargo sub type to 0. Now find a sub cargo type with the exact same name and use that, otherwise fallback to 0. So cargo sub types can be maintained via autoreplace *if* the new vehicle supports the same cargo sub type [FS#3159] (r18499)
|
||||
@@ -3996,7 +3562,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: Make the join/spectate command require to be connected to a network game; in SP it could lead to crashes (r15514)
|
||||
- Fix: Generating a map with the original map generator with freeform edges on resulted in a crash [FS#2641] (r15511)
|
||||
- Fix: Pre-0.5 OpenTTD stored new_nonstop and full_load_any in a different way, savegame conversion was not working for them (r15500)
|
||||
- Fix: Crash when opening the game options when the currently loaded base graphics pack has less than 2 valid graphics files. For example when someone replaces all their original base graphics with custom work (but keeps the name) or renames the dos ones to windows or vice versa [FS#2630] (r15476)
|
||||
- Fix: Crash when opening the game options when the currently loaded base graphics pack has less than 2 valid graphics files. For example when someone replaces all his/her original base graphics with custom work (but keeps the name) or renames the dos ones to windows or vice versa [FS#2630] (r15476)
|
||||
|
||||
|
||||
0.7.0-beta1 (2009-02-16)
|
||||
@@ -4758,7 +4324,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: Switching players (using the cheat) crashed on Big Endian machines [FS#1150] (r11023)
|
||||
- Fix: The canal border determination did not take oil rigs into consideration (r11022)
|
||||
- Fix: Do not display income/expenses when they do not belong to a 'valid' tile, like the money cheat/giving money [FS#1175] (r11021)
|
||||
- Fix: One could not give money when they had too much money or rather: when casting the amount of money to an int32 becomes negative [FS#1174] (r11020)
|
||||
- Fix: One could not give money when (s)he had too much money or rather: when casting the amount of money to an int32 becomes negative [FS#1174] (r11020)
|
||||
- Fix: When determining the gender of a string, do not assume that the gender is in the front of the string when there can be case switching code at that location [FS#1104] (r10792)
|
||||
- Fix: Determining whether there is a tunnel going under the lowered area is only needed in two directions instead of all four, so take the directions (one for each axis) to the nearest border (along the given axis) [FS#1058] (r10686)
|
||||
- Fix: Graphical glitches when the 'link landscape toolbar' patch is turned on when opening one of the construction toolbars [FS#1076] (r10685)
|
||||
@@ -4819,7 +4385,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: Do not unconditionally assume that a tile has a depot (r11027)
|
||||
- Fix: Give a more correct error when building some things on tile 0 [FS#1173] (r11024)
|
||||
- Fix: Do not display income/expenses when they do not belong to a 'valid' tile, like the money cheat and giving money [FS#1175] (r11021)
|
||||
- Fix: One could not give money when they had too much money [FS#1174] (r11020)
|
||||
- Fix: One could not give money when (s)he had too much money [FS#1174] (r11020)
|
||||
- Fix: Disallow buying/selling shares in your own company or a bankrupt company [FS#1169] (r11018)
|
||||
- Fix: Crash when quitting the game in one of the end score windows [FS#1218] (r11071)
|
||||
|
||||
@@ -5779,7 +5345,7 @@ Note: OpenTTD was migrated to GitHub for 1.9, so SVN revision and FlySpray numbe
|
||||
- Fix: Vehicles slow down under bridge if the track is on a foundation
|
||||
- Fix: You can no longer change name of waypoints whom are owned by somebody else
|
||||
- Fix: Shares are now also sold when a company goes bankrupt [SF#1090313]
|
||||
- Fix: It is no longer possible to crash trains of other companies by building a depot close to a station; trains do no longer enter tiles that do not belong to their owner [SF#1087701]
|
||||
- Fix: It is no longer possible to crash trains of other companies by building a depot close to a station; trains do no longer enter tiles that do not belong to his owner [SF#1087701]
|
||||
- Fix: Crashed trains are not reported to have too few orders any more [SF#1087403]
|
||||
- Fix: Backup-order-list was not closed with an OT_NOTHING, [SF#1086375]
|
||||
- Fix: Docks now have a button to display the catchment area [SF#1085255]
|
||||
|
@@ -1,145 +0,0 @@
macro(_parse_arguments_with_multi_hack ORIGINAL_COMMAND_LINE)
    # cmake_parse_arguments() put all the MULTIS in a single variable; you
    # lose the ability to see for example multiple COMMANDs. To be able to
    # passthrough multiple MULTIS, we add a marker after every MULTI. This
    # allows us to reassemble the correct amount again before giving it to
    # the wrapped command with _reassemble_command_line().

    set(COMMAND_LINE "${ORIGINAL_COMMAND_LINE}")

    foreach(MULTI IN LISTS MULTIS)
        string(REPLACE "${MULTI}" "${MULTI};:::" COMMAND_LINE "${COMMAND_LINE}")
    endforeach()

    cmake_parse_arguments(PARAM "${OPTIONS}" "${SINGLES}" "${MULTIS}" ${COMMAND_LINE})
endmacro()
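To make the marker hack concrete, a short illustration follows; it is not part of the original file and is expressed as CMake comments only:

# Illustrative only: with OUTPUT and COMMAND listed in MULTIS, an argument list
#   OUTPUT out.h COMMAND gen1 a COMMAND gen2 b
# is rewritten to
#   OUTPUT;:::;out.h;COMMAND;:::;gen1;a;COMMAND;:::;gen2;b
# cmake_parse_arguments() then yields PARAM_COMMAND = ":::;gen1;a;:::;gen2;b",
# and _reassemble_command_line() below replaces every ":::" in it with COMMAND
# again, restoring the two separate COMMAND entries.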
||||
macro(_reassemble_command_line)
|
||||
# Reassemble the command line as we original got it.
|
||||
set(NEW_COMMAND_LINE ${PARAM_UNPARSED_ARGUMENTS})
|
||||
|
||||
foreach(OPTION IN LISTS OPTIONS)
|
||||
if(PARAM_${OPTION})
|
||||
list(APPEND NEW_COMMAND_LINE "${OPTION}")
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
foreach(SINGLE IN LISTS SINGLES)
|
||||
if(PARAM_${SINGLE})
|
||||
list(APPEND NEW_COMMAND_LINE "${SINGLE}" "${PARAM_${SINGLE}}")
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
foreach(MULTI IN LISTS MULTIS)
|
||||
if(PARAM_${MULTI})
|
||||
# Replace our special marker with the name of the MULTI again. This
|
||||
# restores, for example, multiple COMMANDs.
|
||||
string(REPLACE ":::" "${MULTI}" PARAM_${MULTI} "${PARAM_${MULTI}}")
|
||||
list(APPEND NEW_COMMAND_LINE "${PARAM_${MULTI}}")
|
||||
endif()
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
# Generated files can be older than their dependencies, causing useless
|
||||
# regenerations. This function replaces each file in OUTPUT with a .timestamp
|
||||
# file, adds a command to touch it and moves the original file to BYPRODUCTS,
|
||||
# before calling add_custom_command().
|
||||
#
|
||||
# Note: Any add_custom_target() depending on files in original OUTPUT must use
|
||||
# add_custom_target_timestamp() instead to have the correct dependencies.
|
||||
#
|
||||
# add_custom_command_timestamp(OUTPUT output1 [output2 ...]
|
||||
# COMMAND command1 [ARGS] [args1...]
|
||||
# [COMMAND command2 [ARGS] [args2...] ...]
|
||||
# [MAIN_DEPENDENCY depend]
|
||||
# [DEPENDS [depends...]]
|
||||
# [BYPRODUCTS [files...]]
|
||||
# [IMPLICIT_DEPENDS <lang1> depend1
|
||||
# [<lang2> depend2] ...]
|
||||
# [WORKING_DIRECTORY dir]
|
||||
# [COMMENT comment]
|
||||
# [VERBATIM] [APPEND] [USES_TERMINAL])
|
||||
function(add_custom_command_timestamp)
|
||||
set(OPTIONS VERBATIM APPEND USES_TERMINAL)
|
||||
set(SINGLES MAIN_DEPENDENCY WORKING_DIRECTORY COMMENT)
|
||||
set(MULTIS OUTPUT COMMAND DEPENDS BYPRODUCTS IMPLICIT_DEPENDS)
|
||||
|
||||
_parse_arguments_with_multi_hack("${ARGN}")
|
||||
|
||||
# Create a list of all the OUTPUTs (by removing our magic marker)
|
||||
string(REPLACE ":::;" "" OUTPUTS "${PARAM_OUTPUT}")
|
||||
|
||||
# Reset the OUTPUT and BYPRODUCTS as an empty list (if needed).
|
||||
# Because they are MULTIS, we need to add our special marker here.
|
||||
set(PARAM_OUTPUT ":::")
|
||||
if(NOT PARAM_BYPRODUCTS)
|
||||
set(PARAM_BYPRODUCTS ":::")
|
||||
endif()
|
||||
|
||||
foreach(OUTPUT IN LISTS OUTPUTS)
|
||||
# For every output, we add a 'cmake -E touch' entry to update the
|
||||
# timestamp on each run.
|
||||
get_filename_component(OUTPUT_FILENAME ${OUTPUT} NAME)
|
||||
string(APPEND PARAM_COMMAND ";:::;${CMAKE_COMMAND};-E;touch;${CMAKE_CURRENT_BINARY_DIR}/${OUTPUT_FILENAME}.timestamp")
|
||||
|
||||
# We change the OUTPUT to a '.timestamp' variant, and make the real
|
||||
# output a byproduct.
|
||||
list(APPEND PARAM_OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${OUTPUT_FILENAME}.timestamp)
|
||||
list(APPEND PARAM_BYPRODUCTS ${OUTPUT})
|
||||
|
||||
# Mark this file as being a byproduct; we use this again with
|
||||
# add_custom_target_timestamp() to know if we should point to the
|
||||
# '.timestamp' variant or not.
|
||||
set_source_files_properties(${OUTPUT} PROPERTIES BYPRODUCT ${CMAKE_CURRENT_BINARY_DIR}/${OUTPUT_FILENAME}.timestamp)
|
||||
endforeach()
|
||||
|
||||
# Reassemble and call the wrapped command
|
||||
_reassemble_command_line()
|
||||
add_custom_command(${NEW_COMMAND_LINE})
|
||||
endfunction()
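# A minimal usage sketch, assuming a hypothetical Generate.cmake script that
# writes generated.h: the command is wrapped so later builds compare against
# the freshly touched .timestamp file instead of the (possibly older) header.
add_custom_command_timestamp(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/generated.h
    COMMAND ${CMAKE_COMMAND} -P ${CMAKE_SOURCE_DIR}/cmake/scripts/Generate.cmake
    DEPENDS ${CMAKE_SOURCE_DIR}/src/source.list
    COMMENT "Generating generated.h"
)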
|
||||
|
||||
# Generated files can be older than their dependencies, causing useless
|
||||
# regenerations. This function substitutes the .timestamp file for each file in DEPENDS
|
||||
# that was replaced by add_custom_command_timestamp(), before calling add_custom_target().
|
||||
#
|
||||
# add_custom_target_timestamp(Name [ALL] [command1 [args1...]]
|
||||
# [COMMAND command2 [args2...] ...]
|
||||
# [DEPENDS depend depend depend ... ]
|
||||
# [BYPRODUCTS [files...]]
|
||||
# [WORKING_DIRECTORY dir]
|
||||
# [COMMENT comment]
|
||||
# [VERBATIM] [USES_TERMINAL]
|
||||
# [SOURCES src1 [src2...]])
|
||||
function(add_custom_target_timestamp)
|
||||
set(OPTIONS VERBATIM USES_TERMINAL)
|
||||
set(SINGLES WORKING_DIRECTORY COMMENT)
|
||||
set(MULTIS COMMAND DEPENDS BYPRODUCTS SOURCES)
|
||||
# ALL is missing, as the order is important here. It will be picked up
|
||||
# by ${PARAM_UNPARSED_ARGUMENTS} when reassembling the command line.
|
||||
|
||||
_parse_arguments_with_multi_hack("${ARGN}")
|
||||
|
||||
# Create a list of all the DEPENDs (by removing our magic marker)
|
||||
string(REPLACE ":::;" "" DEPENDS "${PARAM_DEPENDS}")
|
||||
|
||||
# Reset the DEPEND as an empty list.
|
||||
# Because it is a MULTI, we need to add our special marker here.
|
||||
set(PARAM_DEPENDS ":::")
|
||||
|
||||
foreach(DEPEND IN LISTS DEPENDS)
|
||||
# Check if the output is produced by add_custom_command_timestamp()
|
||||
get_source_file_property(BYPRODUCT ${DEPEND} BYPRODUCT)
|
||||
|
||||
if(BYPRODUCT STREQUAL "NOTFOUND")
|
||||
# If it is not, just keep it as DEPEND
|
||||
list(APPEND PARAM_DEPENDS "${DEPEND}")
|
||||
else()
|
||||
# If it is, the BYPRODUCT property points to the timestamp we want to depend on
|
||||
list(APPEND PARAM_DEPENDS "${BYPRODUCT}")
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
# Reassemble and call the wrapped command
|
||||
_reassemble_command_line()
|
||||
add_custom_target(${NEW_COMMAND_LINE})
|
||||
endfunction()
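# A matching sketch for the target side, continuing the hypothetical
# generated.h example above: the DEPENDS entry is transparently rewritten to
# point at its .timestamp byproduct.
add_custom_target_timestamp(generate_header
    ALL
    DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/generated.h
)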
|
@@ -1,165 +0,0 @@
|
||||
# Macro which contains all bits to setup the compile flags correctly.
|
||||
#
|
||||
# compile_flags()
|
||||
#
|
||||
macro(compile_flags)
|
||||
if(MSVC)
|
||||
if(VCPKG_TARGET_TRIPLET MATCHES "-static" AND NOT VCPKG_TARGET_TRIPLET MATCHES "-md")
|
||||
# Switch to MT (static) instead of MD (dynamic) binary
|
||||
|
||||
# For MSVC two generators are available
|
||||
# - a command line generator (Ninja) using CMAKE_BUILD_TYPE to specify the
|
||||
# configuration of the build tree
|
||||
# - an IDE generator (Visual Studio) using CMAKE_CONFIGURATION_TYPES to
|
||||
# specify all configurations that will be available in the generated solution
|
||||
list(APPEND MSVC_CONFIGS "${CMAKE_BUILD_TYPE}" "${CMAKE_CONFIGURATION_TYPES}")
|
||||
|
||||
# Set usage of static runtime for all configurations
|
||||
foreach(MSVC_CONFIG ${MSVC_CONFIGS})
|
||||
string(TOUPPER "CMAKE_CXX_FLAGS_${MSVC_CONFIG}" MSVC_FLAGS)
|
||||
string(REPLACE "/MD" "/MT" ${MSVC_FLAGS} "${${MSVC_FLAGS}}")
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
# "If /Zc:rvalueCast is specified, the compiler follows section 5.4 of the
|
||||
# C++11 standard". We need C++11 for the way we use threads.
|
||||
add_compile_options(/Zc:rvalueCast)
|
||||
|
||||
if(NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
||||
add_compile_options(
|
||||
/MP # Enable multi-threaded compilation.
|
||||
/FC # Display the full path of source code files passed to the compiler in diagnostics.
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Add some -D flags for Debug builds. We cannot use add_definitions(), because
|
||||
# it does not appear to support the $<> tags.
|
||||
add_compile_options(
|
||||
"$<$<CONFIG:Debug>:-D_DEBUG>"
|
||||
"$<$<NOT:$<CONFIG:Debug>>:-D_FORTIFY_SOURCE=2>" # FORTIFY_SOURCE should only be used in non-debug builds (requires -O1+)
|
||||
)
|
||||
if(MINGW)
|
||||
add_link_options(
|
||||
"$<$<NOT:$<CONFIG:Debug>>:-fstack-protector>" # Prevent undefined references when _FORTIFY_SOURCE > 0
|
||||
)
|
||||
endif()
|
||||
|
||||
# Prepare a generator expression that checks that we are not a Debug build and don't have asserts
|
||||
# on. We need this later on to set some compile options for stable releases.
|
||||
set(IS_STABLE_RELEASE "$<AND:$<NOT:$<CONFIG:Debug>>,$<NOT:$<BOOL:${OPTION_USE_ASSERTS}>>>")
|
||||
|
||||
if(MSVC)
|
||||
add_compile_options(/W3)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
||||
add_compile_options(
|
||||
-W
|
||||
-Wall
|
||||
-Wcast-qual
|
||||
-Wextra
|
||||
-Wsign-compare
|
||||
-Wundef
|
||||
-Wpointer-arith
|
||||
-Wwrite-strings
|
||||
-Wredundant-decls
|
||||
-Wformat-security
|
||||
-Wformat=2
|
||||
-Winit-self
|
||||
-Wnon-virtual-dtor
|
||||
|
||||
# Often parameters are unused, which is fine.
|
||||
-Wno-unused-parameter
|
||||
# We use 'ABCD' multichar literals for SaveLoad chunk identifiers
|
||||
-Wno-multichar
|
||||
|
||||
# Compilers complain that we break strict-aliasing.
|
||||
# In most places we don't see how to fix it, and it doesn't
|
||||
# break anything. So disable strict-aliasing to make the
|
||||
# compiler all happy.
|
||||
-fno-strict-aliasing
|
||||
)
|
||||
|
||||
# When we are a stable release (Release build + USE_ASSERTS not set),
|
||||
# assertions are off, which triggers a lot of warnings. We disable
|
||||
# these warnings for these releases.
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
add_compile_options(
|
||||
"$<${IS_STABLE_RELEASE}:-Wno-unused-variable>"
|
||||
"$<${IS_STABLE_RELEASE}:-Wno-unused-but-set-parameter>"
|
||||
"$<${IS_STABLE_RELEASE}:-Wno-unused-but-set-variable>"
|
||||
)
|
||||
else()
|
||||
add_compile_options(
|
||||
"$<${IS_STABLE_RELEASE}:-Wno-unused-variable>"
|
||||
"$<${IS_STABLE_RELEASE}:-Wno-unused-parameter>"
|
||||
)
|
||||
endif()
|
||||
|
||||
# Ninja processes the output so the output from the compiler
|
||||
# isn't sent directly to a terminal; hence, the default is
|
||||
# non-coloured output. We can override this to get nicely
|
||||
# coloured output, but since that might yield odd results with
|
||||
# IDEs, we extract it to an option.
|
||||
if(OPTION_FORCE_COLORED_OUTPUT)
|
||||
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
add_compile_options (-fdiagnostics-color=always)
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
||||
add_compile_options (-fcolor-diagnostics)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
include(CheckCXXCompilerFlag)
|
||||
check_cxx_compiler_flag("-flifetime-dse=1" LIFETIME_DSE_FOUND)
|
||||
|
||||
add_compile_options(
|
||||
# GCC 4.2+ automatically assumes that signed overflows do
|
||||
# not occur in signed arithmetic, whereas we are not
|
||||
# sure that they will not happen. It furthermore complains
|
||||
# about its own optimized code in some places.
|
||||
"-fno-strict-overflow"
|
||||
|
||||
# Prevent optimisation supposing enums are in a range specified by the standard
|
||||
# For details, see http://gcc.gnu.org/PR43680
|
||||
"-fno-tree-vrp"
|
||||
|
||||
# -flifetime-dse=2 (default since GCC 6) doesn't play
|
||||
# well with our custom pool item allocator
|
||||
"$<$<BOOL:${LIFETIME_DSE_FOUND}>:-flifetime-dse=1>"
|
||||
)
|
||||
endif()
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
||||
if (NOT CMAKE_OSX_ARCHITECTURES STREQUAL "arm64")
|
||||
include(CheckCXXCompilerFlag)
|
||||
check_cxx_compiler_flag("-mno-sse4" NO_SSE4_FOUND)
|
||||
|
||||
if(NO_SSE4_FOUND)
|
||||
add_compile_options(
|
||||
# Don't use SSE4 for general sources to increase compatibility.
|
||||
-mno-sse4
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
add_compile_options(
|
||||
-Wall
|
||||
# warning #873: function ... ::operator new ... has no corresponding operator delete ...
|
||||
-wd873
|
||||
# warning #1292: unknown attribute "fallthrough"
|
||||
-wd1292
|
||||
# warning #1899: multicharacter character literal (potential portability problem)
|
||||
-wd1899
|
||||
# warning #2160: anonymous union qualifier is ignored
|
||||
-wd2160
|
||||
)
|
||||
else()
|
||||
message(FATAL_ERROR "No warning flags are set for this compiler yet; please consider creating a Pull Request to add support for this compiler.")
|
||||
endif()
|
||||
|
||||
if(NOT WIN32 AND NOT HAIKU)
|
||||
# rdynamic is used to get useful stack traces from crash reports.
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
|
||||
endif()
|
||||
endmacro()
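# A minimal sketch of how this macro would typically be pulled in from the
# top-level CMakeLists.txt (the module name and option values below are
# assumptions for illustration).
include(CompileFlags)
set(OPTION_USE_ASSERTS OFF)
set(OPTION_FORCE_COLORED_OUTPUT ON)
compile_flags()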
|
@@ -1,50 +0,0 @@
|
||||
# Macro which contains all bits and pieces to create a single grf file based
|
||||
# on NFO and PNG files.
|
||||
#
|
||||
# create_grf_command()
|
||||
#
|
||||
function(create_grf_command)
|
||||
set(EXTRA_PNG_SOURCE_FILES ${ARGV})
|
||||
|
||||
get_filename_component(GRF_SOURCE_FOLDER_NAME "${CMAKE_CURRENT_SOURCE_DIR}" NAME)
|
||||
get_filename_component(GRF_BINARY_FILE ${CMAKE_CURRENT_SOURCE_DIR}/../${GRF_SOURCE_FOLDER_NAME}.grf ABSOLUTE)
|
||||
file(GLOB_RECURSE GRF_PNG_SOURCE_FILES ${CMAKE_CURRENT_SOURCE_DIR}/*.png)
|
||||
file(GLOB_RECURSE GRF_NFO_SOURCE_FILES ${CMAKE_CURRENT_SOURCE_DIR}/*.nfo)
|
||||
set(GRF_PNG_SOURCE_FILES ${GRF_PNG_SOURCE_FILES} ${EXTRA_PNG_SOURCE_FILES})
|
||||
|
||||
# Copy over all the PNG files to the correct folder
|
||||
foreach(GRF_PNG_SOURCE_FILE IN LISTS GRF_PNG_SOURCE_FILES)
|
||||
get_filename_component(GRF_PNG_SOURCE_FILE_NAME "${GRF_PNG_SOURCE_FILE}" NAME)
|
||||
set(GRF_PNG_BINARY_FILE "${CMAKE_CURRENT_BINARY_DIR}/sprites/${GRF_PNG_SOURCE_FILE_NAME}")
|
||||
|
||||
add_custom_command(OUTPUT ${GRF_PNG_BINARY_FILE}
|
||||
COMMAND ${CMAKE_COMMAND} -E copy
|
||||
${GRF_PNG_SOURCE_FILE}
|
||||
${GRF_PNG_BINARY_FILE}
|
||||
MAIN_DEPENDENCY ${GRF_PNG_SOURCE_FILE}
|
||||
COMMENT "Copying ${GRF_PNG_SOURCE_FILE_NAME} sprite file"
|
||||
)
|
||||
|
||||
list(APPEND GRF_PNG_BINARY_FILES ${GRF_PNG_BINARY_FILE})
|
||||
endforeach()
|
||||
|
||||
add_custom_command(OUTPUT ${GRF_BINARY_FILE}
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-DGRF_SOURCE_FOLDER=${CMAKE_CURRENT_SOURCE_DIR}
|
||||
-DGRF_BINARY_FILE=${GRF_BINARY_FILE}
|
||||
-DNFORENUM_EXECUTABLE=${NFORENUM_EXECUTABLE}
|
||||
-DGRFCODEC_EXECUTABLE=${GRFCODEC_EXECUTABLE}
|
||||
-P ${CMAKE_SOURCE_DIR}/cmake/scripts/CreateGRF.cmake
|
||||
MAIN_DEPENDENCY ${CMAKE_SOURCE_DIR}/cmake/scripts/CreateGRF.cmake
|
||||
DEPENDS ${GRF_PNG_BINARY_FILES}
|
||||
${GRF_NFO_SOURCE_FILES}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
|
||||
COMMENT "Generating ${GRF_SOURCE_FOLDER_NAME}.grf"
|
||||
)
|
||||
|
||||
# For convenience, if you only want to test building the GRF
|
||||
add_custom_target(${GRF_SOURCE_FOLDER_NAME}.grf
|
||||
DEPENDS
|
||||
${GRF_BINARY_FILE}
|
||||
)
|
||||
endfunction()
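# A minimal usage sketch: invoked from the CMakeLists.txt inside a GRF source
# folder, optionally passing extra PNG sprites via ARGV (the extra path below
# is an assumption); the result is <folder-name>.grf one directory up.
create_grf_command(
    ${CMAKE_SOURCE_DIR}/media/extra_logo.png
)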
|
@@ -1,86 +0,0 @@
|
||||
# Macro which contains all bits and pieces to create the regression tests.
|
||||
# This creates both a standalone target 'regression', and it integrates with
|
||||
# 'ctest'. The first is preferred, as it is more verbose, and takes care of
|
||||
# dependencies correctly.
|
||||
#
|
||||
# create_regression()
|
||||
#
|
||||
macro(create_regression)
|
||||
# Find all the files in the regression folder; they need to be copied to the
|
||||
# build folder before we can run the regression
|
||||
file(GLOB_RECURSE REGRESSION_SOURCE_FILES ${CMAKE_SOURCE_DIR}/regression/*)
|
||||
foreach(REGRESSION_SOURCE_FILE IN LISTS REGRESSION_SOURCE_FILES)
|
||||
string(REPLACE "${CMAKE_SOURCE_DIR}/regression/" "" REGRESSION_SOURCE_FILE_NAME "${REGRESSION_SOURCE_FILE}")
|
||||
string(CONCAT REGRESSION_BINARY_FILE "${CMAKE_BINARY_DIR}/ai/" "${REGRESSION_SOURCE_FILE_NAME}")
|
||||
|
||||
if("${REGRESSION_SOURCE_FILE_NAME}" STREQUAL "regression.cfg")
|
||||
continue()
|
||||
endif()
|
||||
|
||||
add_custom_command(OUTPUT ${REGRESSION_BINARY_FILE}
|
||||
COMMAND ${CMAKE_COMMAND} -E copy
|
||||
${REGRESSION_SOURCE_FILE}
|
||||
${REGRESSION_BINARY_FILE}
|
||||
MAIN_DEPENDENCY ${REGRESSION_SOURCE_FILE}
|
||||
COMMENT "Copying ${REGRESSION_SOURCE_FILE_NAME} regression file"
|
||||
)
|
||||
|
||||
list(APPEND REGRESSION_BINARY_FILES ${REGRESSION_BINARY_FILE})
|
||||
endforeach()
|
||||
|
||||
# Copy the regression configuration into a special folder, so all autogenerated
|
||||
# folders end up in the same place after running regression.
|
||||
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/regression/regression.cfg
|
||||
COMMAND ${CMAKE_COMMAND} -E copy
|
||||
${CMAKE_SOURCE_DIR}/regression/regression.cfg
|
||||
${CMAKE_BINARY_DIR}/regression/regression.cfg
|
||||
MAIN_DEPENDENCY ${CMAKE_SOURCE_DIR}/regression/regression.cfg
|
||||
COMMENT "Copying ${REGRESSION_SOURCE_FILE_NAME} regression file"
|
||||
)
|
||||
list(APPEND REGRESSION_BINARY_FILES ${CMAKE_BINARY_DIR}/regression/regression.cfg)
|
||||
|
||||
# Create a new target which copies all regression files
|
||||
add_custom_target(regression_files
|
||||
ALL # this is needed because 'make test' doesn't resolve dependencies, and otherwise this is never executed
|
||||
DEPENDS
|
||||
${REGRESSION_BINARY_FILES}
|
||||
)
|
||||
|
||||
enable_testing()
|
||||
|
||||
# Find all the tests we have, and create a target for them
|
||||
file(GLOB REGRESSION_TESTS ${CMAKE_SOURCE_DIR}/regression/*)
|
||||
foreach(REGRESSION_TEST IN LISTS REGRESSION_TESTS)
|
||||
get_filename_component(REGRESSION_TEST_NAME "${REGRESSION_TEST}" NAME)
|
||||
|
||||
if("${REGRESSION_TEST_NAME}" STREQUAL "regression.cfg")
|
||||
continue()
|
||||
endif()
|
||||
|
||||
add_custom_target(regression_${REGRESSION_TEST_NAME}
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-DOPENTTD_EXECUTABLE=$<TARGET_FILE:openttd>
|
||||
-DEDITBIN_EXECUTABLE=${EDITBIN_EXECUTABLE}
|
||||
-DREGRESSION_TEST=${REGRESSION_TEST_NAME}
|
||||
-P "${CMAKE_SOURCE_DIR}/cmake/scripts/Regression.cmake"
|
||||
DEPENDS openttd regression_files
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
COMMENT "Running regression test ${REGRESSION_TEST_NAME}"
|
||||
)
|
||||
|
||||
# Also make sure that 'make test' runs the regression
|
||||
add_test(NAME regression_${REGRESSION_TEST_NAME}
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-DOPENTTD_EXECUTABLE=$<TARGET_FILE:openttd>
|
||||
-DEDITBIN_EXECUTABLE=${EDITBIN_EXECUTABLE}
|
||||
-DREGRESSION_TEST=${REGRESSION_TEST_NAME}
|
||||
-P "${CMAKE_SOURCE_DIR}/cmake/scripts/Regression.cmake"
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
|
||||
|
||||
list(APPEND REGRESSION_TARGETS regression_${REGRESSION_TEST_NAME})
|
||||
endforeach()
|
||||
|
||||
# Create a new target which runs the regression
|
||||
add_custom_target(regression
|
||||
DEPENDS ${REGRESSION_TARGETS})
|
||||
endmacro()
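# A minimal sketch, assuming this macro is included after the 'openttd'
# target has been defined; afterwards the tests can be run either via the
# standalone target or via ctest.
create_regression()
# Preferred (more verbose):  cmake --build . --target regression
# Alternative:               ctest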
|
@@ -1,14 +0,0 @@
|
||||
# Add the definitions to indicate which endian we are building for.
|
||||
#
|
||||
# add_endian_definition()
|
||||
#
|
||||
function(add_endian_definition)
|
||||
include(TestBigEndian)
|
||||
TEST_BIG_ENDIAN(IS_BIG_ENDIAN)
|
||||
|
||||
if(IS_BIG_ENDIAN)
|
||||
add_definitions(-DTTD_ENDIAN=TTD_BIG_ENDIAN)
|
||||
else()
|
||||
add_definitions(-DTTD_ENDIAN=TTD_LITTLE_ENDIAN)
|
||||
endif()
|
||||
endfunction()
|
@@ -1,65 +0,0 @@
|
||||
#[=======================================================================[.rst:
|
||||
FindAllegro
|
||||
-----------
|
||||
|
||||
Finds the allegro library.
|
||||
|
||||
Result Variables
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This will define the following variables:
|
||||
|
||||
``Allegro_FOUND``
|
||||
True if the system has the allegro library.
|
||||
``Allegro_INCLUDE_DIRS``
|
||||
Include directories needed to use allegro.
|
||||
``Allegro_LIBRARIES``
|
||||
Libraries needed to link to allegro.
|
||||
``Allegro_VERSION``
|
||||
The version of the allegro library which was found.
|
||||
|
||||
Cache Variables
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
The following cache variables may also be set:
|
||||
|
||||
``Allegro_INCLUDE_DIR``
|
||||
The directory containing ``allegro.h``.
|
||||
``Allegro_LIBRARY``
|
||||
The path to the allegro library.
|
||||
|
||||
#]=======================================================================]
|
||||
|
||||
find_package(PkgConfig QUIET)
|
||||
pkg_check_modules(PC_Allegro QUIET allegro<5)
|
||||
|
||||
find_path(Allegro_INCLUDE_DIR
|
||||
NAMES allegro.h
|
||||
PATHS ${PC_Allegro_INCLUDE_DIRS}
|
||||
)
|
||||
|
||||
find_library(Allegro_LIBRARY
|
||||
NAMES alleg
|
||||
PATHS ${PC_Allegro_LIBRARY_DIRS}
|
||||
)
|
||||
|
||||
set(Allegro_VERSION ${PC_Allegro_VERSION})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Allegro
|
||||
FOUND_VAR Allegro_FOUND
|
||||
REQUIRED_VARS
|
||||
Allegro_LIBRARY
|
||||
Allegro_INCLUDE_DIR
|
||||
VERSION_VAR Allegro_VERSION
|
||||
)
|
||||
|
||||
if(Allegro_FOUND)
|
||||
set(Allegro_LIBRARIES ${Allegro_LIBRARY})
|
||||
set(Allegro_INCLUDE_DIRS ${Allegro_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(
|
||||
Allegro_INCLUDE_DIR
|
||||
Allegro_LIBRARY
|
||||
)
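# A minimal consumption sketch using the result variables defined above;
# the 'openttd' target name is an assumption for illustration.
find_package(Allegro)
if(Allegro_FOUND)
    target_include_directories(openttd PRIVATE ${Allegro_INCLUDE_DIRS})
    target_link_libraries(openttd PRIVATE ${Allegro_LIBRARIES})
endif()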
|
@@ -1,30 +0,0 @@
|
||||
# Autodetect editbin. Only useful for MSVC.
|
||||
|
||||
if(NOT EDITBIN_DIRECTORY)
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
get_filename_component(MSVC_COMPILE_DIRECTORY ${CMAKE_CXX_COMPILER} DIRECTORY)
|
||||
set(EDITBIN_DIRECTORY ${MSVC_COMPILE_DIRECTORY})
|
||||
else()
|
||||
# For clang-cl build
|
||||
# find editbin.exe from the environment variable VCToolsInstallDir
|
||||
set(EDITBIN_DIRECTORY "$ENV{VCToolsInstallDir}/bin/Hostx64/x64")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
message(CHECK_START "Finding editbin.exe")
|
||||
find_program(
|
||||
EDITBIN_EXECUTABLE editbin.exe
|
||||
HINTS ${EDITBIN_DIRECTORY}
|
||||
)
|
||||
|
||||
if(EDITBIN_EXECUTABLE)
|
||||
message(CHECK_PASS "found")
|
||||
else()
|
||||
message(CHECK_FAIL "not found , please manually specify EDITBIN_DIRECTORY")
|
||||
endif()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Editbin
|
||||
FOUND_VAR EDITBIN_FOUND
|
||||
REQUIRED_VARS EDITBIN_EXECUTABLE
|
||||
)
|
@@ -1,65 +0,0 @@
|
||||
#[=======================================================================[.rst:
|
||||
FindFluidsynth
|
||||
--------------
|
||||
|
||||
Finds the fluidsynth library.
|
||||
|
||||
Result Variables
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This will define the following variables:
|
||||
|
||||
``Fluidsynth_FOUND``
|
||||
True if the system has the fluidsynth library.
|
||||
``Fluidsynth_INCLUDE_DIRS``
|
||||
Include directories needed to use fluidsynth.
|
||||
``Fluidsynth_LIBRARIES``
|
||||
Libraries needed to link to fluidsynth.
|
||||
``Fluidsynth_VERSION``
|
||||
The version of the fluidsynth library which was found.
|
||||
|
||||
Cache Variables
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
The following cache variables may also be set:
|
||||
|
||||
``Fluidsynth_INCLUDE_DIR``
|
||||
The directory containing ``fluidsynth.h``.
|
||||
``Fluidsynth_LIBRARY``
|
||||
The path to the fluidsynth library.
|
||||
|
||||
#]=======================================================================]
|
||||
|
||||
find_package(PkgConfig QUIET)
|
||||
pkg_check_modules(PC_Fluidsynth QUIET fluidsynth)
|
||||
|
||||
find_path(Fluidsynth_INCLUDE_DIR
|
||||
NAMES fluidsynth.h
|
||||
PATHS ${PC_Fluidsynth_INCLUDE_DIRS}
|
||||
)
|
||||
|
||||
find_library(Fluidsynth_LIBRARY
|
||||
NAMES fluidsynth
|
||||
PATHS ${PC_Fluidsynth_LIBRARY_DIRS}
|
||||
)
|
||||
|
||||
set(Fluidsynth_VERSION ${PC_Fluidsynth_VERSION})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Fluidsynth
|
||||
FOUND_VAR Fluidsynth_FOUND
|
||||
REQUIRED_VARS
|
||||
Fluidsynth_LIBRARY
|
||||
Fluidsynth_INCLUDE_DIR
|
||||
VERSION_VAR Fluidsynth_VERSION
|
||||
)
|
||||
|
||||
if(Fluidsynth_FOUND)
|
||||
set(Fluidsynth_LIBRARIES ${Fluidsynth_LIBRARY})
|
||||
set(Fluidsynth_INCLUDE_DIRS ${Fluidsynth_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(
|
||||
Fluidsynth_INCLUDE_DIR
|
||||
Fluidsynth_LIBRARY
|
||||
)
|
@@ -1,101 +0,0 @@
|
||||
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
|
||||
# file Copyright.txt or https://cmake.org/licensing for details.
|
||||
|
||||
#[=======================================================================[.rst:
|
||||
FindFontconfig
|
||||
--------------
|
||||
|
||||
Find Fontconfig headers and library.
|
||||
|
||||
Imported Targets
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
``Fontconfig::Fontconfig``
|
||||
The Fontconfig library, if found.
|
||||
|
||||
Result Variables
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This will define the following variables in your project:
|
||||
|
||||
``Fontconfig_FOUND``
|
||||
true if (the requested version of) Fontconfig is available.
|
||||
``Fontconfig_VERSION``
|
||||
the version of Fontconfig.
|
||||
``Fontconfig_LIBRARIES``
|
||||
the libraries to link against to use Fontconfig.
|
||||
``Fontconfig_INCLUDE_DIRS``
|
||||
where to find the Fontconfig headers.
|
||||
``Fontconfig_COMPILE_OPTIONS``
|
||||
this should be passed to target_compile_options(), if the
|
||||
target is not used for linking
|
||||
|
||||
#]=======================================================================]
|
||||
|
||||
|
||||
# use pkg-config to get the directories and then use these values
|
||||
# in the FIND_PATH() and FIND_LIBRARY() calls
|
||||
find_package(PkgConfig QUIET)
|
||||
pkg_check_modules(PKG_FONTCONFIG QUIET fontconfig)
|
||||
set(Fontconfig_COMPILE_OPTIONS ${PKG_FONTCONFIG_CFLAGS_OTHER})
|
||||
set(Fontconfig_VERSION ${PKG_FONTCONFIG_VERSION})
|
||||
|
||||
find_path( Fontconfig_INCLUDE_DIR
|
||||
NAMES
|
||||
fontconfig/fontconfig.h
|
||||
HINTS
|
||||
${PKG_FONTCONFIG_INCLUDE_DIRS}
|
||||
/usr/X11/include
|
||||
)
|
||||
|
||||
find_library( Fontconfig_LIBRARY
|
||||
NAMES
|
||||
fontconfig
|
||||
PATHS
|
||||
${PKG_FONTCONFIG_LIBRARY_DIRS}
|
||||
)
|
||||
|
||||
if(Fontconfig_INCLUDE_DIR AND NOT Fontconfig_VERSION)
|
||||
file(STRINGS ${Fontconfig_INCLUDE_DIR}/fontconfig/fontconfig.h _contents REGEX "^#define[ \t]+FC_[A-Z]+[ \t]+[0-9]+$")
|
||||
unset(Fontconfig_VERSION)
|
||||
foreach(VPART MAJOR MINOR REVISION)
|
||||
foreach(VLINE ${_contents})
|
||||
if(VLINE MATCHES "^#define[\t ]+FC_${VPART}[\t ]+([0-9]+)$")
|
||||
set(Fontconfig_VERSION_PART "${CMAKE_MATCH_1}")
|
||||
if(Fontconfig_VERSION)
|
||||
string(APPEND Fontconfig_VERSION ".${Fontconfig_VERSION_PART}")
|
||||
else()
|
||||
set(Fontconfig_VERSION "${Fontconfig_VERSION_PART}")
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Fontconfig
|
||||
FOUND_VAR
|
||||
Fontconfig_FOUND
|
||||
REQUIRED_VARS
|
||||
Fontconfig_LIBRARY
|
||||
Fontconfig_INCLUDE_DIR
|
||||
VERSION_VAR
|
||||
Fontconfig_VERSION
|
||||
)
|
||||
|
||||
|
||||
if(Fontconfig_FOUND AND NOT TARGET Fontconfig::Fontconfig)
|
||||
add_library(Fontconfig::Fontconfig UNKNOWN IMPORTED)
|
||||
set_target_properties(Fontconfig::Fontconfig PROPERTIES
|
||||
IMPORTED_LOCATION "${Fontconfig_LIBRARY}"
|
||||
INTERFACE_COMPILE_OPTIONS "${Fontconfig_COMPILE_OPTIONS}"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${Fontconfig_INCLUDE_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
mark_as_advanced(Fontconfig_LIBRARY Fontconfig_INCLUDE_DIR)
|
||||
|
||||
if(Fontconfig_FOUND)
|
||||
set(Fontconfig_LIBRARIES ${Fontconfig_LIBRARY})
|
||||
set(Fontconfig_INCLUDE_DIRS ${Fontconfig_INCLUDE_DIR})
|
||||
endif()
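# A minimal consumption sketch: the imported target already carries the
# include directories and compile options, so linking it is enough (the
# 'openttd' target name is an assumption).
find_package(Fontconfig)
if(Fontconfig_FOUND)
    target_link_libraries(openttd PRIVATE Fontconfig::Fontconfig)
endif()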
|
@@ -1,13 +0,0 @@
|
||||
# Autodetect grfcodec and nforenum.
|
||||
#
|
||||
|
||||
find_program(GRFCODEC_EXECUTABLE grfcodec)
|
||||
find_program(NFORENUM_EXECUTABLE nforenum)
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(Grfcodec
|
||||
FOUND_VAR GRFCODEC_FOUND
|
||||
REQUIRED_VARS
|
||||
GRFCODEC_EXECUTABLE
|
||||
NFORENUM_EXECUTABLE
|
||||
)
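# A minimal usage sketch: both executables need to be found before GRF files
# can be regenerated (they are later passed on to CreateGRF.cmake by
# create_grf_command()).
find_package(Grfcodec)
if(NOT GRFCODEC_FOUND)
    message(WARNING "grfcodec/nforenum not found; GRF files cannot be regenerated")
endif()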
|
@@ -1,69 +0,0 @@
|
||||
# CMake provides a FindICU module since version 3.7.
|
||||
# But it doesn't use pkgconfig, doesn't set expected variables,
|
||||
# and it returns incomplete dependencies if only some modules are searched.
|
||||
|
||||
|
||||
#[=======================================================================[.rst:
|
||||
FindICU
|
||||
-------
|
||||
|
||||
Finds components of the ICU library.
|
||||
|
||||
Accepted components are: uc, i18n, le, lx, io
|
||||
|
||||
Result Variables
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This will define the following variables:
|
||||
|
||||
``ICU_FOUND``
|
||||
True if components of ICU library are found.
|
||||
``ICU_VERSION``
|
||||
The version of the ICU library which was found.
|
||||
``ICU_<c>_FOUND``
|
||||
True if the system has the <c> component of ICU library.
|
||||
``ICU_<c>_INCLUDE_DIRS``
|
||||
Include directories needed to use the <c> component of ICU library.
|
||||
``ICU_<c>_LIBRARIES``
|
||||
Libraries needed to link to the <c> component of ICU library.
|
||||
|
||||
#]=======================================================================]
|
||||
|
||||
find_package(PkgConfig QUIET)
|
||||
|
||||
set(ICU_KNOWN_COMPONENTS "uc" "i18n" "le" "lx" "io")
|
||||
|
||||
foreach(MOD_NAME IN LISTS ICU_FIND_COMPONENTS)
|
||||
if(NOT MOD_NAME IN_LIST ICU_KNOWN_COMPONENTS)
|
||||
message(FATAL_ERROR "Unknown ICU component: ${MOD_NAME}")
|
||||
endif()
|
||||
pkg_check_modules(PC_ICU_${MOD_NAME} QUIET icu-${MOD_NAME})
|
||||
|
||||
# Check the libraries returned by pkg-config really exist.
|
||||
unset(PC_LIBRARIES)
|
||||
foreach(LIBRARY IN LISTS PC_ICU_${MOD_NAME}_LIBRARIES)
|
||||
unset(PC_LIBRARY CACHE)
|
||||
find_library(PC_LIBRARY NAMES ${LIBRARY})
|
||||
if(NOT PC_LIBRARY)
|
||||
unset(PC_ICU_${MOD_NAME}_FOUND)
|
||||
endif()
|
||||
list(APPEND PC_LIBRARIES ${PC_LIBRARY})
|
||||
endforeach()
|
||||
unset(PC_LIBRARY CACHE)
|
||||
|
||||
if(${PC_ICU_${MOD_NAME}_FOUND})
|
||||
set(ICU_COMPONENT_FOUND TRUE)
|
||||
set(ICU_${MOD_NAME}_FOUND TRUE)
|
||||
set(ICU_${MOD_NAME}_LIBRARIES ${PC_LIBRARIES})
|
||||
set(ICU_${MOD_NAME}_INCLUDE_DIRS ${PC_ICU_${MOD_NAME}_INCLUDE_DIRS})
|
||||
set(ICU_VERSION ${PC_ICU_${MOD_NAME}_VERSION})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(ICU
|
||||
FOUND_VAR ICU_FOUND
|
||||
REQUIRED_VARS ICU_COMPONENT_FOUND
|
||||
VERSION_VAR ICU_VERSION
|
||||
HANDLE_COMPONENTS
|
||||
)
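# A minimal consumption sketch using the per-component result variables
# defined above (the component selection and the 'openttd' target name are
# assumptions).
find_package(ICU OPTIONAL_COMPONENTS i18n uc)
if(ICU_i18n_FOUND)
    target_include_directories(openttd PRIVATE ${ICU_i18n_INCLUDE_DIRS})
    target_link_libraries(openttd PRIVATE ${ICU_i18n_LIBRARIES})
endif()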
|
@@ -1,133 +0,0 @@
|
||||
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
|
||||
# file Copyright.txt or https://cmake.org/licensing for details.
|
||||
|
||||
#[=======================================================================[.rst:
|
||||
FindIconv
|
||||
---------
|
||||
|
||||
This module finds the ``iconv()`` POSIX.1 functions on the system.
|
||||
These functions might be provided in the regular C library or externally
|
||||
in the form of an additional library.
|
||||
|
||||
The following variables are provided to indicate iconv support:
|
||||
|
||||
.. variable:: Iconv_FOUND
|
||||
|
||||
Variable indicating if the iconv support was found.
|
||||
|
||||
.. variable:: Iconv_INCLUDE_DIRS
|
||||
|
||||
The directories containing the iconv headers.
|
||||
|
||||
.. variable:: Iconv_LIBRARIES
|
||||
|
||||
The iconv libraries to be linked.
|
||||
|
||||
.. variable:: Iconv_IS_BUILT_IN
|
||||
|
||||
A variable indicating whether iconv support is stemming from the
|
||||
C library or not. Even if the C library provides `iconv()`, the presence of
|
||||
an external `libiconv` implementation might lead to this being false.
|
||||
|
||||
Additionally, the following :prop_tgt:`IMPORTED` target is being provided:
|
||||
|
||||
.. variable:: Iconv::Iconv
|
||||
|
||||
Imported target for using iconv.
|
||||
|
||||
The following cache variables may also be set:
|
||||
|
||||
.. variable:: Iconv_INCLUDE_DIR
|
||||
|
||||
The directory containing the iconv headers.
|
||||
|
||||
.. variable:: Iconv_LIBRARY
|
||||
|
||||
The iconv library (if not implicitly given in the C library).
|
||||
|
||||
.. note::
|
||||
On POSIX platforms, iconv might be part of the C library and the cache
|
||||
variables ``Iconv_INCLUDE_DIR`` and ``Iconv_LIBRARY`` might be empty.
|
||||
|
||||
#]=======================================================================]
|
||||
|
||||
include(CMakePushCheckState)
|
||||
if(CMAKE_C_COMPILER_LOADED)
|
||||
include(CheckCSourceCompiles)
|
||||
elseif(CMAKE_CXX_COMPILER_LOADED)
|
||||
include(CheckCXXSourceCompiles)
|
||||
else()
|
||||
# If neither C nor CXX is loaded, implicit iconv makes no sense.
|
||||
set(Iconv_IS_BUILT_IN NO)
|
||||
endif()
|
||||
|
||||
# iconv can only be provided in libc on a POSIX system.
|
||||
# If any cache variable is already set, we'll skip this test.
|
||||
if(NOT DEFINED Iconv_IS_BUILT_IN)
|
||||
if(UNIX AND NOT DEFINED Iconv_INCLUDE_DIR AND NOT DEFINED Iconv_LIBRARY)
|
||||
cmake_push_check_state(RESET)
|
||||
# We always suppress the message here: Otherwise on supported systems
|
||||
# not having iconv in their C library (e.g. those using libiconv)
|
||||
# would always display a confusing "Looking for iconv - not found" message
|
||||
set(CMAKE_FIND_QUIETLY TRUE)
|
||||
# The following code will not work, but it's sufficient to see if it compiles.
|
||||
# Note: libiconv will define the iconv functions as macros, so CheckSymbolExists
|
||||
# will not yield correct results.
|
||||
set(Iconv_IMPLICIT_TEST_CODE
|
||||
"
|
||||
#include <stddef.h>
|
||||
#include <iconv.h>
|
||||
int main() {
|
||||
char *a, *b;
|
||||
size_t i, j;
|
||||
iconv_t ic;
|
||||
ic = iconv_open(\"to\", \"from\");
|
||||
iconv(ic, &a, &i, &b, &j);
|
||||
iconv_close(ic);
|
||||
}
|
||||
"
|
||||
)
|
||||
if(CMAKE_C_COMPILER_LOADED)
|
||||
check_c_source_compiles("${Iconv_IMPLICIT_TEST_CODE}" Iconv_IS_BUILT_IN)
|
||||
else()
|
||||
check_cxx_source_compiles("${Iconv_IMPLICIT_TEST_CODE}" Iconv_IS_BUILT_IN)
|
||||
endif()
|
||||
cmake_pop_check_state()
|
||||
else()
|
||||
set(Iconv_IS_BUILT_IN NO)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT Iconv_IS_BUILT_IN)
|
||||
find_path(Iconv_INCLUDE_DIR
|
||||
NAMES "iconv.h"
|
||||
DOC "iconv include directory")
|
||||
set(Iconv_LIBRARY_NAMES "iconv" "libiconv")
|
||||
else()
|
||||
set(Iconv_INCLUDE_DIR "" CACHE FILEPATH "iconv include directory")
|
||||
set(Iconv_LIBRARY_NAMES "c")
|
||||
endif()
|
||||
|
||||
find_library(Iconv_LIBRARY
|
||||
NAMES ${Iconv_LIBRARY_NAMES}
|
||||
DOC "iconv library (potentially the C library)")
|
||||
|
||||
mark_as_advanced(Iconv_INCLUDE_DIR)
|
||||
mark_as_advanced(Iconv_LIBRARY)
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
if(NOT Iconv_IS_BUILT_IN)
|
||||
find_package_handle_standard_args(Iconv REQUIRED_VARS Iconv_LIBRARY Iconv_INCLUDE_DIR)
|
||||
else()
|
||||
find_package_handle_standard_args(Iconv REQUIRED_VARS Iconv_LIBRARY)
|
||||
endif()
|
||||
|
||||
if(Iconv_FOUND)
|
||||
set(Iconv_INCLUDE_DIRS "${Iconv_INCLUDE_DIR}")
|
||||
set(Iconv_LIBRARIES "${Iconv_LIBRARY}")
|
||||
if(NOT TARGET Iconv::Iconv)
|
||||
add_library(Iconv::Iconv INTERFACE IMPORTED)
|
||||
endif()
|
||||
set_property(TARGET Iconv::Iconv PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${Iconv_INCLUDE_DIRS}")
|
||||
set_property(TARGET Iconv::Iconv PROPERTY INTERFACE_LINK_LIBRARIES "${Iconv_LIBRARIES}")
|
||||
endif()
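# A minimal consumption sketch: the Iconv::Iconv interface target works both
# when iconv is part of the C library and when an external libiconv is found
# (the 'openttd' target name is an assumption).
find_package(Iconv)
if(Iconv_FOUND)
    target_link_libraries(openttd PRIVATE Iconv::Iconv)
endif()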
|
@@ -1,89 +0,0 @@
|
||||
#[=======================================================================[.rst:
|
||||
FindLZO
|
||||
-------
|
||||
|
||||
Finds the LZO library.
|
||||
|
||||
Result Variables
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This will define the following variables:
|
||||
|
||||
``LZO_FOUND``
|
||||
True if the system has the LZO library.
|
||||
``LZO_INCLUDE_DIRS``
|
||||
Include directories needed to use LZO.
|
||||
``LZO_LIBRARIES``
|
||||
Libraries needed to link to LZO.
|
||||
``LZO_VERSION``
|
||||
The version of the LZO library which was found.
|
||||
|
||||
Cache Variables
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
The following cache variables may also be set:
|
||||
|
||||
``LZO_INCLUDE_DIR``
|
||||
The directory containing ``lzo/lzo1x.h``.
|
||||
``LZO_LIBRARY``
|
||||
The path to the LZO library.
|
||||
|
||||
#]=======================================================================]
|
||||
|
||||
find_package(PkgConfig QUIET)
|
||||
pkg_check_modules(PC_LZO QUIET lzo2)
|
||||
|
||||
find_path(LZO_INCLUDE_DIR
|
||||
NAMES lzo/lzo1x.h
|
||||
PATHS ${PC_LZO_INCLUDE_DIRS}
|
||||
)
|
||||
|
||||
find_library(LZO_LIBRARY
|
||||
NAMES lzo2
|
||||
PATHS ${PC_LZO_LIBRARY_DIRS}
|
||||
)
|
||||
|
||||
# With vcpkg, the library path should contain both 'debug' and 'optimized'
|
||||
# entries (see target_link_libraries() documentation for more information)
|
||||
#
|
||||
# NOTE: we only patch up when using vcpkg; the same issue might happen
|
||||
# when not using vcpkg, but this is non-trivial to fix, as we have no idea
|
||||
# what the paths are. With vcpkg we do. And we only officially support vcpkg
|
||||
# with Windows.
|
||||
#
|
||||
# NOTE: this is based on the assumption that the debug file has the same
|
||||
# name as the optimized file. This is not always the case, but so far
|
||||
# experience has shown that in those cases vcpkg CMake files do the right
|
||||
# thing.
|
||||
if(VCPKG_TOOLCHAIN AND LZO_LIBRARY)
|
||||
if(LZO_LIBRARY MATCHES "/debug/")
|
||||
set(LZO_LIBRARY_DEBUG ${LZO_LIBRARY})
|
||||
string(REPLACE "/debug/lib/" "/lib/" LZO_LIBRARY_RELEASE ${LZO_LIBRARY})
|
||||
else()
|
||||
set(LZO_LIBRARY_RELEASE ${LZO_LIBRARY})
|
||||
string(REPLACE "/lib/" "/debug/lib/" LZO_LIBRARY_DEBUG ${LZO_LIBRARY})
|
||||
endif()
|
||||
include(SelectLibraryConfigurations)
|
||||
select_library_configurations(LZO)
|
||||
endif()
|
||||
|
||||
set(LZO_VERSION ${PC_LZO_VERSION})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(LZO
|
||||
FOUND_VAR LZO_FOUND
|
||||
REQUIRED_VARS
|
||||
LZO_LIBRARY
|
||||
LZO_INCLUDE_DIR
|
||||
VERSION_VAR LZO_VERSION
|
||||
)
|
||||
|
||||
if(LZO_FOUND)
|
||||
set(LZO_LIBRARIES ${LZO_LIBRARY})
|
||||
set(LZO_INCLUDE_DIRS ${LZO_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(
|
||||
LZO_INCLUDE_DIR
|
||||
LZO_LIBRARY
|
||||
)
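# A minimal consumption sketch using the result variables defined above; with
# vcpkg, LZO_LIBRARIES contains the debug/optimized pair selected earlier
# (the 'openttd' target name is an assumption).
find_package(LZO)
if(LZO_FOUND)
    target_include_directories(openttd PRIVATE ${LZO_INCLUDE_DIRS})
    target_link_libraries(openttd PRIVATE ${LZO_LIBRARIES})
endif()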
|
@@ -1,3 +0,0 @@
|
||||
if(NOT EXISTS ${PANDOC_EXECUTABLE})
|
||||
find_program(PANDOC_EXECUTABLE pandoc)
|
||||
endif()
|
@@ -1,17 +0,0 @@
|
||||
# Autodetect if SSE4.1 can be used. If so, the assumption is that the other
|
||||
# SSE versions (SSE 2.0, SSSE 3.0) can be used as well.
|
||||
|
||||
include(CheckCXXSourceCompiles)
|
||||
set(CMAKE_REQUIRED_FLAGS "")
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
||||
set(CMAKE_REQUIRED_FLAGS "-msse4.1")
|
||||
endif()
|
||||
|
||||
check_cxx_source_compiles("
|
||||
#include <xmmintrin.h>
|
||||
#include <smmintrin.h>
|
||||
#include <tmmintrin.h>
|
||||
int main() { return 0; }"
|
||||
SSE_FOUND
|
||||
)
|
@@ -1,19 +0,0 @@
|
||||
# Autodetect if xaudio2 can be used.
|
||||
|
||||
include(CheckCXXSourceCompiles)
|
||||
set(CMAKE_REQUIRED_FLAGS "")
|
||||
|
||||
check_cxx_source_compiles("
|
||||
#include <cstdio>
|
||||
#include <windows.h>
|
||||
|
||||
#undef NTDDI_VERSION
|
||||
#undef _WIN32_WINNT
|
||||
|
||||
#define NTDDI_VERSION NTDDI_WIN8
|
||||
#define _WIN32_WINNT _WIN32_WINNT_WIN8
|
||||
|
||||
#include <xaudio2.h>
|
||||
int main() { printf(\"%s\\\\n\", XAUDIO2_DLL_A); return 0; }"
|
||||
XAUDIO2_FOUND
|
||||
)
|
@@ -1,231 +0,0 @@
|
||||
include(GNUInstallDirs)
|
||||
|
||||
# If requested, use FHS layout; otherwise fall back to a flat layout.
|
||||
if(OPTION_INSTALL_FHS)
|
||||
set(BINARY_DESTINATION_DIR "${CMAKE_INSTALL_BINDIR}")
|
||||
set(DATA_DESTINATION_DIR "${CMAKE_INSTALL_DATADIR}/${BINARY_NAME}")
|
||||
set(DOCS_DESTINATION_DIR "${CMAKE_INSTALL_DOCDIR}")
|
||||
set(MAN_DESTINATION_DIR "${CMAKE_INSTALL_MANDIR}")
|
||||
else()
|
||||
if(APPLE)
|
||||
set(BINARY_DESTINATION_DIR "../MacOS")
|
||||
else()
|
||||
set(BINARY_DESTINATION_DIR ".")
|
||||
endif()
|
||||
set(DATA_DESTINATION_DIR ".")
|
||||
set(DOCS_DESTINATION_DIR ".")
|
||||
set(MAN_DESTINATION_DIR ".")
|
||||
endif()
|
||||
|
||||
install(TARGETS openttd
|
||||
RUNTIME
|
||||
DESTINATION ${BINARY_DESTINATION_DIR}
|
||||
COMPONENT Runtime
|
||||
)
|
||||
|
||||
install(DIRECTORY
|
||||
${CMAKE_BINARY_DIR}/lang
|
||||
${CMAKE_BINARY_DIR}/baseset
|
||||
${CMAKE_BINARY_DIR}/ai
|
||||
${CMAKE_BINARY_DIR}/game
|
||||
${CMAKE_SOURCE_DIR}/bin/scripts
|
||||
DESTINATION ${DATA_DESTINATION_DIR}
|
||||
COMPONENT language_files
|
||||
REGEX "ai/[^\.]+$" EXCLUDE # Ignore subdirs in ai dir
|
||||
)
|
||||
|
||||
install(FILES
|
||||
${CMAKE_SOURCE_DIR}/COPYING.md
|
||||
${CMAKE_SOURCE_DIR}/README.md
|
||||
${CMAKE_SOURCE_DIR}/changelog.txt
|
||||
${CMAKE_SOURCE_DIR}/docs/multiplayer.md
|
||||
${CMAKE_SOURCE_DIR}/known-bugs.txt
|
||||
DESTINATION ${DOCS_DESTINATION_DIR}
|
||||
COMPONENT docs)
|
||||
|
||||
# A Linux manual only makes sense when using FHS. Otherwise it is a very odd
|
||||
# file with little context as to what it is.
|
||||
if(OPTION_INSTALL_FHS)
|
||||
set(MAN_SOURCE_FILE ${CMAKE_SOURCE_DIR}/docs/openttd.6)
|
||||
set(MAN_BINARY_FILE ${CMAKE_BINARY_DIR}/docs/${BINARY_NAME}.6)
|
||||
install(CODE
|
||||
"
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E copy ${MAN_SOURCE_FILE} ${MAN_BINARY_FILE})
|
||||
execute_process(COMMAND gzip -9 -n -f ${MAN_BINARY_FILE})
|
||||
"
|
||||
COMPONENT manual)
|
||||
install(FILES
|
||||
${MAN_BINARY_FILE}.gz
|
||||
DESTINATION ${MAN_DESTINATION_DIR}/man6
|
||||
COMPONENT manual)
|
||||
endif()
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
install(DIRECTORY
|
||||
${CMAKE_BINARY_DIR}/media/icons
|
||||
${CMAKE_BINARY_DIR}/media/pixmaps
|
||||
DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}
|
||||
COMPONENT media)
|
||||
|
||||
install(FILES
|
||||
${CMAKE_BINARY_DIR}/media/${BINARY_NAME}.desktop
|
||||
DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/applications
|
||||
COMPONENT menu)
|
||||
endif()
|
||||
|
||||
if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64")
|
||||
set(ARCHITECTURE "amd64")
|
||||
else()
|
||||
string(TOLOWER "${CMAKE_SYSTEM_PROCESSOR}" ARCHITECTURE)
|
||||
endif()
|
||||
|
||||
# Windows is a bit more annoying to detect; using the size of a void pointer
|
||||
# seems to be the most robust.
|
||||
if(WIN32)
|
||||
# Check if the MSVC platform has been defined
|
||||
if ("$ENV{Platform}" STREQUAL "arm64")
|
||||
set(ARCHITECTURE "arm64")
|
||||
else()
|
||||
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
set(ARCHITECTURE "win64")
|
||||
else()
|
||||
set(ARCHITECTURE "win32")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
if(APPLE AND CMAKE_OSX_ARCHITECTURES)
|
||||
string(TOLOWER "${CMAKE_OSX_ARCHITECTURES}" ARCHITECTURE)
|
||||
endif()
|
||||
|
||||
set(CPACK_SYSTEM_NAME "${ARCHITECTURE}")
|
||||
|
||||
set(CPACK_PACKAGE_NAME "openttd")
|
||||
set(CPACK_PACKAGE_VENDOR "OpenTTD")
|
||||
set(CPACK_PACKAGE_DESCRIPTION "OpenTTD")
|
||||
set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "OpenTTD")
|
||||
set(CPACK_PACKAGE_HOMEPAGE_URL "https://www.openttd.org/")
|
||||
set(CPACK_PACKAGE_CONTACT "OpenTTD <info@openttd.org>")
|
||||
set(CPACK_PACKAGE_INSTALL_DIRECTORY "OpenTTD")
|
||||
set(CPACK_PACKAGE_CHECKSUM "SHA256")
|
||||
|
||||
if((APPLE OR WIN32) AND EXISTS ${PANDOC_EXECUTABLE})
|
||||
execute_process(COMMAND ${PANDOC_EXECUTABLE} "${CMAKE_SOURCE_DIR}/COPYING.md" -s -o "${CMAKE_BINARY_DIR}/COPYING.rtf")
|
||||
set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_BINARY_DIR}/COPYING.rtf")
|
||||
else()
|
||||
set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_SOURCE_DIR}/COPYING.md")
|
||||
endif()
|
||||
|
||||
set(CPACK_RESOURCE_FILE_README "${CMAKE_SOURCE_DIR}/README.md")
|
||||
set(CPACK_MONOLITHIC_INSTALL YES)
|
||||
set(CPACK_PACKAGE_EXECUTABLES "openttd;OpenTTD")
|
||||
set(CPACK_STRIP_FILES YES)
|
||||
set(CPACK_OUTPUT_FILE_PREFIX "bundles")
|
||||
|
||||
if(APPLE)
|
||||
set(CPACK_GENERATOR "Bundle")
|
||||
include(PackageBundle)
|
||||
|
||||
if (APPLE_UNIVERSAL_PACKAGE)
|
||||
set(CPACK_PACKAGE_FILE_NAME "openttd-#CPACK_PACKAGE_VERSION#-macos-universal")
|
||||
else()
|
||||
set(CPACK_PACKAGE_FILE_NAME "openttd-#CPACK_PACKAGE_VERSION#-macos-${CPACK_SYSTEM_NAME}")
|
||||
endif()
|
||||
elseif(WIN32)
|
||||
set(CPACK_GENERATOR "ZIP")
|
||||
if(OPTION_USE_NSIS)
|
||||
list(APPEND CPACK_GENERATOR "NSIS")
|
||||
include(PackageNSIS)
|
||||
endif()
|
||||
|
||||
set(CPACK_PACKAGE_FILE_NAME "openttd-#CPACK_PACKAGE_VERSION#-windows-${CPACK_SYSTEM_NAME}")
|
||||
|
||||
if(WINDOWS_CERTIFICATE_COMMON_NAME)
|
||||
add_custom_command(TARGET openttd
|
||||
POST_BUILD
|
||||
COMMAND "${CMAKE_SOURCE_DIR}/os/windows/sign.bat" "$<TARGET_FILE:openttd>" "${WINDOWS_CERTIFICATE_COMMON_NAME}"
|
||||
)
|
||||
endif()
|
||||
elseif(UNIX)
|
||||
# With FHS, we can create deb/rpm/... Without it, they would be horribly broken
|
||||
# and not work. The other way around is also true; with FHS they are not
|
||||
# usable, and only flat formats work.
|
||||
if(OPTION_PACKAGE_DEPENDENCIES)
|
||||
set(CPACK_GENERATOR "TXZ")
|
||||
set(PLATFORM "generic")
|
||||
elseif(NOT OPTION_INSTALL_FHS)
|
||||
set(CPACK_GENERATOR "TXZ")
|
||||
set(PLATFORM "unknown")
|
||||
else()
|
||||
find_program(LSB_RELEASE_EXEC lsb_release)
|
||||
execute_process(COMMAND ${LSB_RELEASE_EXEC} -is
|
||||
OUTPUT_VARIABLE LSB_RELEASE_ID
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
if(LSB_RELEASE_ID)
|
||||
if(LSB_RELEASE_ID STREQUAL "Ubuntu" OR LSB_RELEASE_ID STREQUAL "Debian")
|
||||
execute_process(COMMAND ${LSB_RELEASE_EXEC} -cs
|
||||
OUTPUT_VARIABLE LSB_RELEASE_CODENAME
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
string(TOLOWER "${LSB_RELEASE_ID}-${LSB_RELEASE_CODENAME}" PLATFORM)
|
||||
|
||||
set(CPACK_GENERATOR "DEB")
|
||||
include(PackageDeb)
|
||||
else()
|
||||
set(UNSUPPORTED_PLATFORM_NAME "LSB-based Linux distribution '${LSB_RELEASE_ID}'")
|
||||
endif()
|
||||
elseif(EXISTS "/etc/os-release")
|
||||
file(STRINGS "/etc/os-release" OS_RELEASE_CONTENTS REGEX "^ID=")
|
||||
string(REGEX MATCH "ID=(.*)" _ ${OS_RELEASE_CONTENTS})
|
||||
set(DISTRO_ID ${CMAKE_MATCH_1})
|
||||
if(DISTRO_ID STREQUAL "arch")
|
||||
set(PLATFORM "arch")
|
||||
set(CPACK_GENERATOR "TXZ")
|
||||
else()
|
||||
set(UNSUPPORTED_PLATFORM_NAME "Linux distribution '${DISTRO_ID}' from /etc/os-release")
|
||||
endif()
|
||||
else()
|
||||
set(UNSUPPORTED_PLATFORM_NAME "Linux distribution")
|
||||
endif()
|
||||
|
||||
if(NOT PLATFORM)
|
||||
set(PLATFORM "generic")
|
||||
set(CPACK_GENERATOR "TXZ")
|
||||
message(WARNING "Unknown ${UNSUPPORTED_PLATFORM_NAME} found for packaging; can only pack to a txz. Please consider creating a Pull Request to add support for this distribution.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(CPACK_PACKAGE_FILE_NAME "openttd-#CPACK_PACKAGE_VERSION#-linux-${PLATFORM}-${CPACK_SYSTEM_NAME}")
|
||||
|
||||
else()
|
||||
message(FATAL_ERROR "Unknown OS found for packaging; please consider creating a Pull Request to add support for this OS.")
|
||||
endif()
|
||||
|
||||
include(CPack)
|
||||
|
||||
if(OPTION_PACKAGE_DEPENDENCIES)
|
||||
# Install all dependencies we can resolve, with the exception of ones that
|
||||
# every Linux machine should have. This should make this build as generic
|
||||
# as possible, so that it runs on any machine with the same or newer libc
|
||||
# than the one this is compiled with.
|
||||
# We copy these libraries into lib/ folder, so they can be found on game
|
||||
# startup. See comment in root CMakeLists.txt for how this works exactly.
|
||||
install(CODE [[
|
||||
file(GET_RUNTIME_DEPENDENCIES
|
||||
RESOLVED_DEPENDENCIES_VAR DEPENDENCIES
|
||||
UNRESOLVED_DEPENDENCIES_VAR UNRESOLVED_DEPENDENCIES
|
||||
EXECUTABLES openttd
|
||||
POST_EXCLUDE_REGEXES "ld-linux|libc.so|libdl.so|libm.so|libgcc_s.so|libpthread.so|librt.so|libstdc...so")
|
||||
file(INSTALL
|
||||
DESTINATION "${CMAKE_INSTALL_PREFIX}/lib"
|
||||
FILES ${DEPENDENCIES}
|
||||
FOLLOW_SYMLINK_CHAIN)
|
||||
|
||||
# This should not be possible, but error out when a dependency cannot
|
||||
# be resolved.
|
||||
list(LENGTH UNRESOLVED_DEPENDENCIES UNRESOLVED_LENGTH)
|
||||
if(${UNRESOLVED_LENGTH} GREATER 0)
|
||||
message(FATAL_ERROR "Unresolved dependencies: ${UNRESOLVED_DEPENDENCIES}")
|
||||
endif()
|
||||
]])
|
||||
endif()
|
Some files were not shown because too many files have changed in this diff.