diff --git a/deplock.json b/deplock.json
index 0692f0c3..a8153394 100644
--- a/deplock.json
+++ b/deplock.json
@@ -2,7 +2,7 @@
"dep_locks": [
{
"url": "https://github.com/bitrise-core/bitrise-init.git",
- "revision": "a96564a4385140b60e0b1ca4314d626ed03273bd"
+ "revision": "b978e1a28b9d66dfaba3f33ac22cc41feacd5f80"
}
]
}
\ No newline at end of file
diff --git a/go/src/github.com/bitrise-core/bitrise-init/Godeps/Godeps.json b/go/src/github.com/bitrise-core/bitrise-init/Godeps/Godeps.json
index bf8ad712..b6be9586 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/Godeps/Godeps.json
+++ b/go/src/github.com/bitrise-core/bitrise-init/Godeps/Godeps.json
@@ -1,7 +1,7 @@
{
"ImportPath": "github.com/bitrise-core/bitrise-init",
"GoVersion": "go1.7",
- "GodepVersion": "v74",
+ "GodepVersion": "v75",
"Packages": [
"./..."
],
@@ -13,41 +13,41 @@
},
{
"ImportPath": "github.com/bitrise-io/bitrise/models",
- "Comment": "1.4.0-12-gfff55c4",
- "Rev": "fff55c49696a185eee006fec1527c80a1c90b707"
+ "Comment": "1.4.5-11-g50e3241",
+ "Rev": "50e3241184ee39d5d473c73f4bcb6987f99507fd"
},
{
"ImportPath": "github.com/bitrise-io/envman/models",
- "Comment": "1.1.1",
- "Rev": "d1c4f5bdab06ba1927460f9cbbac6eab5d337912"
+ "Comment": "1.1.2-1-g8c4944e",
+ "Rev": "8c4944e61db658e8127766fd7352913732bdf461"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/cmdex",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/colorstring",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/errorutil",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/fileutil",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/parseutil",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/pathutil",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/go-utils/pointers",
- "Rev": "5c210e4691a5a75e5685d2e842a104ca55ec7697"
+ "Rev": "f9155f8d01013ec098cba431327cbeb063dacea7"
},
{
"ImportPath": "github.com/bitrise-io/goinp/goinp",
@@ -55,17 +55,17 @@
},
{
"ImportPath": "github.com/bitrise-io/stepman/models",
- "Comment": "0.9.23-3-g9146f28",
- "Rev": "9146f28602ad25010f491c47ad95a0e8ad3fdb1e"
+ "Comment": "0.9.25-2-g80f3ca5",
+ "Rev": "80f3ca5e2c2fe2f3604685a8ed6b974c3d2dd7cb"
},
{
- "ImportPath": "github.com/bitrise-io/xcode-utils/xcodeproj",
- "Rev": "cc85e3b9f8025a57ffcc54f1c23c371960ed939c"
+ "ImportPath": "github.com/bitrise-tools/go-xcode/xcodeproj",
+ "Rev": "3f50256af6281dbf2b3b688abe153432a089d8d4"
},
{
"ImportPath": "github.com/davecgh/go-spew/spew",
- "Comment": "v1.0.0-3-g6d21280",
- "Rev": "6d212800a42e8ab5c146b8ace3490ee17e5225f9"
+ "Comment": "v1.1.0",
+ "Rev": "346938d642f2ec3594ed81d874461961cd0faa76"
},
{
"ImportPath": "github.com/pmezard/go-difflib/difflib",
@@ -79,13 +79,13 @@
},
{
"ImportPath": "github.com/stretchr/testify/assert",
- "Comment": "v1.1.4-4-g976c720",
- "Rev": "976c720a22c8eb4eb6a0b4348ad85ad12491a506"
+ "Comment": "v1.1.4-6-g18a02ba",
+ "Rev": "18a02ba4a312f95da08ff4cfc0055750ce50ae9e"
},
{
"ImportPath": "github.com/stretchr/testify/require",
- "Comment": "v1.1.4-4-g976c720",
- "Rev": "976c720a22c8eb4eb6a0b4348ad85ad12491a506"
+ "Comment": "v1.1.4-6-g18a02ba",
+ "Rev": "18a02ba4a312f95da08ff4cfc0055750ce50ae9e"
},
{
"ImportPath": "github.com/urfave/cli",
@@ -96,6 +96,10 @@
"ImportPath": "golang.org/x/sys/unix",
"Rev": "8f0908ab3b2457e2e15403d3697c9ef5cb4b57a9"
},
+ {
+ "ImportPath": "gopkg.in/yaml.v1",
+ "Rev": "9f9df34309c04878acc86042b16630b0f696e1de"
+ },
{
"ImportPath": "gopkg.in/yaml.v2",
"Rev": "31c299268d302dd0aa9a0dcf765a3d58971ac83f"
diff --git a/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/android_test.go b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/android_test.go
new file mode 100644
index 00000000..92fab31a
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/android_test.go
@@ -0,0 +1,192 @@
+package integration
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/steps"
+ "github.com/bitrise-io/go-utils/cmdex"
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestAndroid(t *testing.T) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__android__")
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, os.RemoveAll(tmpDir))
+ }()
+
+ t.Log("sample-apps-android-sdk22")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "sample-apps-android-sdk22")
+ sampleAppURL := "https://github.com/bitrise-samples/sample-apps-android-sdk22.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(sampleAppsAndroid22ResultYML), strings.TrimSpace(result))
+ }
+
+ t.Log("android-non-executable-gradlew")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "android-non-executable-gradlew")
+ sampleAppURL := "https://github.com/bitrise-samples/android-non-executable-gradlew.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(androidNonExecutableGradlewResultYML), strings.TrimSpace(result))
+ }
+}
+
+var androidNonExecutableGradlewResultYML = fmt.Sprintf(`options:
+ android:
+ title: Path to the gradle file to use
+ env_key: GRADLE_BUILD_FILE_PATH
+ value_map:
+ build.gradle:
+ title: Gradle task to run
+ env_key: GRADLE_TASK
+ value_map:
+ assemble:
+ title: Gradlew file path
+ env_key: GRADLEW_PATH
+ value_map:
+ ./gradlew:
+ config: android-config
+ assembleDebug:
+ title: Gradlew file path
+ env_key: GRADLEW_PATH
+ value_map:
+ ./gradlew:
+ config: android-config
+ assembleRelease:
+ title: Gradlew file path
+ env_key: GRADLEW_PATH
+ value_map:
+ ./gradlew:
+ config: android-config
+configs:
+ android:
+ android-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - workflow: primary
+ pattern: '*'
+ is_pull_request_allowed: true
+ workflows:
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - script@%s:
+ title: Update Android Extra packages
+ inputs:
+ - content: |
+ #!/bin/bash
+ set -ex
+
+ echo y | android update sdk --no-ui --all --filter platform-tools | grep 'package installed'
+
+ echo y | android update sdk --no-ui --all --filter extra-android-m2repository | grep 'package installed'
+ echo y | android update sdk --no-ui --all --filter extra-google-m2repository | grep 'package installed'
+ echo y | android update sdk --no-ui --all --filter extra-google-google_play_services | grep 'package installed'
+ - gradle-runner@%s:
+ inputs:
+ - gradle_file: $GRADLE_BUILD_FILE_PATH
+ - gradle_task: $GRADLE_TASK
+ - gradlew_path: $GRADLEW_PATH
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ android: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.ScriptVersion, steps.GradleRunnerVersion, steps.DeployToBitriseIoVersion)
+
+var sampleAppsAndroid22ResultYML = fmt.Sprintf(`options:
+ android:
+ title: Path to the gradle file to use
+ env_key: GRADLE_BUILD_FILE_PATH
+ value_map:
+ build.gradle:
+ title: Gradle task to run
+ env_key: GRADLE_TASK
+ value_map:
+ assemble:
+ title: Gradlew file path
+ env_key: GRADLEW_PATH
+ value_map:
+ ./gradlew:
+ config: android-config
+ assembleDebug:
+ title: Gradlew file path
+ env_key: GRADLEW_PATH
+ value_map:
+ ./gradlew:
+ config: android-config
+ assembleRelease:
+ title: Gradlew file path
+ env_key: GRADLEW_PATH
+ value_map:
+ ./gradlew:
+ config: android-config
+configs:
+ android:
+ android-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - workflow: primary
+ pattern: '*'
+ is_pull_request_allowed: true
+ workflows:
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - script@%s:
+ title: Update Android Extra packages
+ inputs:
+ - content: |
+ #!/bin/bash
+ set -ex
+
+ echo y | android update sdk --no-ui --all --filter platform-tools | grep 'package installed'
+
+ echo y | android update sdk --no-ui --all --filter extra-android-m2repository | grep 'package installed'
+ echo y | android update sdk --no-ui --all --filter extra-google-m2repository | grep 'package installed'
+ echo y | android update sdk --no-ui --all --filter extra-google-google_play_services | grep 'package installed'
+ - gradle-runner@%s:
+ inputs:
+ - gradle_file: $GRADLE_BUILD_FILE_PATH
+ - gradle_task: $GRADLE_TASK
+ - gradlew_path: $GRADLEW_PATH
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ android: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.ScriptVersion, steps.GradleRunnerVersion, steps.DeployToBitriseIoVersion)
diff --git a/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/fastlane_test.go b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/fastlane_test.go
new file mode 100644
index 00000000..1e13bca2
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/fastlane_test.go
@@ -0,0 +1,137 @@
+package integration
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/steps"
+ "github.com/bitrise-io/go-utils/cmdex"
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestFastlane(t *testing.T) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("fastlane")
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, os.RemoveAll(tmpDir))
+ }()
+
+ t.Log("fastlane")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "fastlane")
+ sampleAppURL := "https://github.com/bitrise-samples/fastlane.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(fastlaneResultYML), strings.TrimSpace(result))
+ }
+}
+
+var fastlaneResultYML = fmt.Sprintf(`options:
+ fastlane:
+ title: Working directory
+ env_key: FASTLANE_WORK_DIR
+ value_map:
+ BitriseFastlaneSample:
+ title: Fastlane lane
+ env_key: FASTLANE_LANE
+ value_map:
+ test:
+ config: fastlane-config
+ ios:
+ title: Project (or Workspace) path
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ BitriseFastlaneSample/BitriseFastlaneSample.xcodeproj:
+ title: Scheme name
+ env_key: BITRISE_SCHEME
+ value_map:
+ BitriseFastlaneSample:
+ config: ios-test-config
+configs:
+ fastlane:
+ fastlane-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ app:
+ envs:
+ - FASTLANE_XCODE_LIST_TIMEOUT: "120"
+ trigger_map:
+ - workflow: primary
+ pattern: '*'
+ is_pull_request_allowed: true
+ workflows:
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - fastlane@%s:
+ inputs:
+ - lane: $FASTLANE_LANE
+ - work_dir: $FASTLANE_WORK_DIR
+ - deploy-to-bitrise-io@%s: {}
+ ios:
+ ios-test-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - push_branch: '*'
+ workflow: primary
+ - pull_request_source_branch: '*'
+ workflow: primary
+ workflows:
+ deploy:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - xcode-archive@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ fastlane: []
+ ios: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.FastlaneVersion, steps.DeployToBitriseIoVersion,
+ models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestVersion, steps.XcodeArchiveVersion, steps.DeployToBitriseIoVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestVersion, steps.DeployToBitriseIoVersion)
diff --git a/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/helper.go b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/helper.go
new file mode 100644
index 00000000..fbd55914
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/helper.go
@@ -0,0 +1,7 @@
+package integration
+
+import "os"
+
+func binPath() string {
+ return os.Getenv("INTEGRATION_TEST_BINARY_PATH")
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/ios_test.go b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/ios_test.go
new file mode 100644
index 00000000..83a1180f
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/ios_test.go
@@ -0,0 +1,302 @@
+package integration
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "strings"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/steps"
+ "github.com/bitrise-io/go-utils/cmdex"
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestIOS(t *testing.T) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__ios__")
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, os.RemoveAll(tmpDir))
+ }()
+
+ t.Log("ios-no-shared-schemes")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "ios-no-shared-scheme")
+ sampleAppURL := "https://github.com/bitrise-samples/ios-no-shared-schemes.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(iosNoSharedSchemesResultYML), strings.TrimSpace(result))
+ }
+
+ t.Log("ios-cocoapods-at-root")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "ios-cocoapods-at-root")
+ sampleAppURL := "https://github.com/bitrise-samples/ios-cocoapods-at-root.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(iosCocoapodsAtRootResultYML), strings.TrimSpace(result))
+ }
+
+ t.Log("sample-apps-ios-watchkit")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "sample-apps-ios-watchkit")
+ sampleAppURL := "https://github.com/bitrise-io/sample-apps-ios-watchkit.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(sampleAppsIosWatchkitResultYML), strings.TrimSpace(result))
+ }
+}
+
+var sampleAppsIosWatchkitResultYML = fmt.Sprintf(`options:
+ ios:
+ title: Project (or Workspace) path
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ watch-test.xcodeproj:
+ title: Scheme name
+ env_key: BITRISE_SCHEME
+ value_map:
+ Complication - watch-test WatchKit App:
+ config: ios-config
+ Glance - watch-test WatchKit App:
+ config: ios-config
+ Notification - watch-test WatchKit App:
+ config: ios-config
+ watch-test:
+ config: ios-test-config
+ watch-test WatchKit App:
+ config: ios-config
+configs:
+ ios:
+ ios-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - push_branch: '*'
+ workflow: primary
+ - pull_request_source_branch: '*'
+ workflow: primary
+ workflows:
+ deploy:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-archive@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - deploy-to-bitrise-io@%s: {}
+ ios-test-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - push_branch: '*'
+ workflow: primary
+ - pull_request_source_branch: '*'
+ workflow: primary
+ workflows:
+ deploy:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - xcode-archive@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ ios: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeArchiveVersion, steps.DeployToBitriseIoVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.DeployToBitriseIoVersion,
+ models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestVersion, steps.XcodeArchiveVersion, steps.DeployToBitriseIoVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestVersion, steps.DeployToBitriseIoVersion)
+
+var iosCocoapodsAtRootResultYML = fmt.Sprintf(`options:
+ ios:
+ title: Project (or Workspace) path
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ iOSMinimalCocoaPodsSample.xcodeproj:
+ title: Scheme name
+ env_key: BITRISE_SCHEME
+ value_map:
+ iOSMinimalCocoaPodsSample:
+ config: ios-test-config
+configs:
+ ios:
+ ios-test-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - push_branch: '*'
+ workflow: primary
+ - pull_request_source_branch: '*'
+ workflow: primary
+ workflows:
+ deploy:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - xcode-archive@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ ios: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestVersion, steps.XcodeArchiveVersion, steps.DeployToBitriseIoVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestVersion, steps.DeployToBitriseIoVersion)
+
+var iosNoSharedSchemesResultYML = fmt.Sprintf(`options:
+ ios:
+ title: Project (or Workspace) path
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ BitriseXcode7Sample.xcodeproj:
+ title: Scheme name
+ env_key: BITRISE_SCHEME
+ value_map:
+ BitriseXcode7Sample:
+ config: ios-test-missing-shared-schemes-config
+configs:
+ ios:
+ ios-test-missing-shared-schemes-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - push_branch: '*'
+ workflow: primary
+ - pull_request_source_branch: '*'
+ workflow: primary
+ workflows:
+ deploy:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - recreate-user-schemes@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - xcode-archive@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - recreate-user-schemes@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - xcode-test@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ ios:
+ - "No shared schemes found for project: BitriseXcode7Sample.xcodeproj.\n\tAutomatically
+ generated schemes for this project.\n\tThese schemes may differ from the ones
+ in your project.\n\tMake sure to share
+ your schemes for the expected behaviour."
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.RecreateUserSchemesVersion, steps.XcodeTestVersion, steps.XcodeArchiveVersion, steps.DeployToBitriseIoVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.RecreateUserSchemesVersion, steps.XcodeTestVersion, steps.DeployToBitriseIoVersion)
diff --git a/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/mac_test.go b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/mac_test.go
new file mode 100644
index 00000000..2f63954c
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/mac_test.go
@@ -0,0 +1,100 @@
+package integration
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/steps"
+ "github.com/bitrise-io/go-utils/cmdex"
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/stretchr/testify/require"
+)
+
+func _TestMacOS(t *testing.T) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__macos__")
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, os.RemoveAll(tmpDir))
+ }()
+
+ t.Log("sample-apps-osx-10-11")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "sample-apps-osx-10-11")
+ sampleAppURL := "https://github.com/bitrise-samples/sample-apps-osx-10-11.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(sampleAppsOSX1011ResultYML), strings.TrimSpace(result))
+ }
+}
+
+var sampleAppsOSX1011ResultYML = fmt.Sprintf(`options:
+ macos:
+ title: Project (or Workspace) path
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ sample-apps-osx-10-11.xcodeproj:
+ title: Scheme name
+ env_key: BITRISE_SCHEME
+ value_map:
+ sample-apps-osx-10-11:
+ config: macos-test-config
+configs:
+ macos:
+ macos-test-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ app: {}
+ trigger_map:
+ - push_branch: '*'
+ workflow: primary
+ - pull_request_source_branch: '*'
+ workflow: primary
+ workflows:
+ deploy:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test-mac@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - xcode-archive-mac@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xcode-test-mac@%s:
+ inputs:
+ - project_path: $BITRISE_PROJECT_PATH
+ - scheme: $BITRISE_SCHEME
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ macos: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestMacVersion, steps.XcodeArchiveMacVersion, steps.DeployToBitriseIoVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XcodeTestMacVersion, steps.DeployToBitriseIoVersion)
diff --git a/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/xamarin_test.go b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/xamarin_test.go
new file mode 100644
index 00000000..0621367b
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/_tests/integration/xamarin_test.go
@@ -0,0 +1,251 @@
+package integration
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/steps"
+ "github.com/bitrise-io/go-utils/cmdex"
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/stretchr/testify/require"
+)
+
+func TestXamarin(t *testing.T) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__xamarin__")
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, os.RemoveAll(tmpDir))
+ }()
+
+ t.Log("xamarin-sample-app")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "xamarin-sample-app")
+ sampleAppURL := "https://github.com/bitrise-samples/xamarin-sample-app.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(xamarinSampleAppResultYML), strings.TrimSpace(result))
+ }
+
+ t.Log("sample-apps-xamarin-ios")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "sample-apps-xamarin-ios")
+ sampleAppURL := "https://github.com/bitrise-io/sample-apps-xamarin-ios.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(sampleAppsXamarinIosResultYML), strings.TrimSpace(result))
+ }
+
+ t.Log("sample-apps-xamarin-android")
+ {
+ sampleAppDir := filepath.Join(tmpDir, "sample-apps-xamarin-android")
+ sampleAppURL := "https://github.com/bitrise-io/sample-apps-xamarin-android.git"
+ require.NoError(t, cmdex.GitClone(sampleAppURL, sampleAppDir))
+
+ cmd := cmdex.NewCommand(binPath(), "--ci", "config", "--dir", sampleAppDir, "--output-dir", sampleAppDir)
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ require.NoError(t, err, out)
+
+ scanResultPth := filepath.Join(sampleAppDir, "result.yml")
+
+ result, err := fileutil.ReadStringFromFile(scanResultPth)
+ require.NoError(t, err)
+ require.Equal(t, strings.TrimSpace(sampleAppsXamarinAndroidResultYML), strings.TrimSpace(result))
+ }
+}
+
+var sampleAppsXamarinAndroidResultYML = fmt.Sprintf(`options:
+ xamarin:
+ title: Path to the Xamarin Solution file
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ CreditCardValidator.Droid.sln:
+ title: Xamarin solution configuration
+ env_key: BITRISE_XAMARIN_CONFIGURATION
+ value_map:
+ Debug:
+ title: Xamarin solution platform
+ env_key: BITRISE_XAMARIN_PLATFORM
+ value_map:
+ Any CPU:
+ config: xamarin-nuget-config
+ Release:
+ title: Xamarin solution platform
+ env_key: BITRISE_XAMARIN_PLATFORM
+ value_map:
+ Any CPU:
+ config: xamarin-nuget-config
+configs:
+ xamarin:
+ xamarin-nuget-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - workflow: primary
+ pattern: '*'
+ is_pull_request_allowed: true
+ workflows:
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xamarin-user-management@%s:
+ run_if: .IsCI
+ - nuget-restore@%s: {}
+ - xamarin-archive@%s:
+ inputs:
+ - xamarin_solution: $BITRISE_PROJECT_PATH
+ - xamarin_configuration: $BITRISE_XAMARIN_CONFIGURATION
+ - xamarin_platform: $BITRISE_XAMARIN_PLATFORM
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ xamarin: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XamarinUserManagementVersion, steps.NugetRestoreVersion, steps.XamarinArchiveVersion, steps.DeployToBitriseIoVersion)
+
+var sampleAppsXamarinIosResultYML = fmt.Sprintf(`options:
+ xamarin:
+ title: Path to the Xamarin Solution file
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ CreditCardValidator.iOS.sln:
+ title: Xamarin solution configuration
+ env_key: BITRISE_XAMARIN_CONFIGURATION
+ value_map:
+ Debug:
+ title: Xamarin solution platform
+ env_key: BITRISE_XAMARIN_PLATFORM
+ value_map:
+ Any CPU:
+ config: xamarin-nuget-config
+ iPhone:
+ config: xamarin-nuget-config
+ iPhoneSimulator:
+ config: xamarin-nuget-config
+ Release:
+ title: Xamarin solution platform
+ env_key: BITRISE_XAMARIN_PLATFORM
+ value_map:
+ Any CPU:
+ config: xamarin-nuget-config
+ iPhone:
+ config: xamarin-nuget-config
+ iPhoneSimulator:
+ config: xamarin-nuget-config
+configs:
+ xamarin:
+ xamarin-nuget-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - workflow: primary
+ pattern: '*'
+ is_pull_request_allowed: true
+ workflows:
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xamarin-user-management@%s:
+ run_if: .IsCI
+ - nuget-restore@%s: {}
+ - xamarin-archive@%s:
+ inputs:
+ - xamarin_solution: $BITRISE_PROJECT_PATH
+ - xamarin_configuration: $BITRISE_XAMARIN_CONFIGURATION
+ - xamarin_platform: $BITRISE_XAMARIN_PLATFORM
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ xamarin: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XamarinUserManagementVersion, steps.NugetRestoreVersion, steps.XamarinArchiveVersion, steps.DeployToBitriseIoVersion)
+
+var xamarinSampleAppResultYML = fmt.Sprintf(`options:
+ xamarin:
+ title: Path to the Xamarin Solution file
+ env_key: BITRISE_PROJECT_PATH
+ value_map:
+ XamarinSampleApp.sln:
+ title: Xamarin solution configuration
+ env_key: BITRISE_XAMARIN_CONFIGURATION
+ value_map:
+ Debug:
+ title: Xamarin solution platform
+ env_key: BITRISE_XAMARIN_PLATFORM
+ value_map:
+ Any CPU:
+ config: xamarin-nuget-components-config
+ iPhone:
+ config: xamarin-nuget-components-config
+ iPhoneSimulator:
+ config: xamarin-nuget-components-config
+ Release:
+ title: Xamarin solution platform
+ env_key: BITRISE_XAMARIN_PLATFORM
+ value_map:
+ Any CPU:
+ config: xamarin-nuget-components-config
+ iPhone:
+ config: xamarin-nuget-components-config
+ iPhoneSimulator:
+ config: xamarin-nuget-components-config
+configs:
+ xamarin:
+ xamarin-nuget-components-config: |
+ format_version: %s
+ default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
+ trigger_map:
+ - workflow: primary
+ pattern: '*'
+ is_pull_request_allowed: true
+ workflows:
+ primary:
+ steps:
+ - activate-ssh-key@%s:
+ run_if: '{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}'
+ - git-clone@%s: {}
+ - script@%s:
+ title: Do anything with Script step
+ - certificate-and-profile-installer@%s: {}
+ - xamarin-user-management@%s:
+ run_if: .IsCI
+ - nuget-restore@%s: {}
+ - xamarin-components-restore@%s: {}
+ - xamarin-archive@%s:
+ inputs:
+ - xamarin_solution: $BITRISE_PROJECT_PATH
+ - xamarin_configuration: $BITRISE_XAMARIN_CONFIGURATION
+ - xamarin_platform: $BITRISE_XAMARIN_PLATFORM
+ - deploy-to-bitrise-io@%s: {}
+warnings:
+ xamarin: []
+`, models.FormatVersion,
+ steps.ActivateSSHKeyVersion, steps.GitCloneVersion, steps.ScriptVersion, steps.CertificateAndProfileInstallerVersion, steps.XamarinUserManagementVersion, steps.NugetRestoreVersion, steps.XamarinComponentsRestoreVersion, steps.XamarinArchiveVersion, steps.DeployToBitriseIoVersion)
diff --git a/go/src/github.com/bitrise-core/bitrise-init/bitrise.yml b/go/src/github.com/bitrise-core/bitrise-init/bitrise.yml
index 34394325..23c07f75 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/bitrise.yml
+++ b/go/src/github.com/bitrise-core/bitrise-init/bitrise.yml
@@ -1,11 +1,6 @@
-format_version: 1.2.0
+format_version: 1.3.1
default_step_lib_source: https://github.com/bitrise-io/bitrise-steplib.git
-trigger_map:
-- pattern: "*"
- is_pull_request_allowed: true
- workflow: ci
-
app:
envs:
- RELEASE_VERSION: 0.11.1
@@ -19,7 +14,10 @@ workflows:
before_run:
- _prepare_and_setup
after_run:
- - test
+ - go-test
+ - integration-test
+
+ go-test:
steps:
- script:
title: Print go environment
@@ -60,418 +58,79 @@ workflows:
inputs:
- content: go test ./...
- test:
- title: Bitrise Init Test
- description: Bitrise Init Test
- before_run:
- - _install_init_tool
- steps:
- - script:
- title: "Scanner test: ios-no-shared-schemes"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/ios-no-shared-schemes.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: ios-cocoapods-at-root"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/ios-cocoapods-at-root.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: sample-apps-ios-simple-objc"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-ios-simple-objc.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: sample-apps-android-sdk22"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/sample-apps-android-sdk22.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: xamarin-sample-app"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/xamarin-sample-app.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: sample-apps-ios-watchkit"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-ios-watchkit.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: fastlane"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/fastlane.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: sample-apps-xamarin-ios"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-xamarin-ios.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: android-non-executable-gradlew"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/android-non-executable-gradlew.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
- - script:
- title: "Scanner test: sample-apps-xamarin-android"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-xamarin-android.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN --ci config --dir ./_tmp/sample-repo --output-dir ./_tmp/output --format json
-
- # Scanner test with bitrise run config test
- local_test_with_bitrise_run:
- before_run:
- - _install_init_tool
- after_run:
- - _test_ios-no-shared-schemes
- - _test_ios-cocoapods-at-root
- - _test_sample-apps-ios-simple-objc
- - _test_sample-apps-android-sdk22
- - _test_xamarin-sample-app
- - _test_sample-apps-ios-watchkit
- - _test_fastlane
- - _test_sample-apps-xamarin-ios
- - _test_android-non-executable-gradlew
- - _test_sample-apps-xamarin-android
-
- _test_ios-no-shared-schemes:
- steps:
- - script:
- title: "Scanner test: ios-no-shared-schemes"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/ios-no-shared-schemes.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
-
- _test_ios-cocoapods-at-root:
- steps:
- - script:
- title: "Scanner test: ios-cocoapods-at-root"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/ios-cocoapods-at-root.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
-
- _test_sample-apps-ios-simple-objc:
- steps:
- - script:
- title: "Scanner test: sample-apps-ios-simple-objc"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-ios-simple-objc.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
-
- _test_sample-apps-android-sdk22:
- steps:
- - script:
- title: "Scanner test: sample-apps-android-sdk22"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/sample-apps-android-sdk22.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
-
- _test_xamarin-sample-app:
- steps:
- - script:
- title: "Scanner test: xamarin-sample-app"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/xamarin-sample-app.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
-
- _test_sample-apps-ios-watchkit:
+ integration-test:
steps:
- script:
- title: "Scanner test: sample-apps-ios-watchkit"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-ios-watchkit.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
+ title: Go build
inputs:
- content: |-
#!/bin/bash
- set -x
-
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
+ set -ex
- _test_fastlane:
- steps:
- - script:
- title: "Scanner test: fastlane"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
+ # build the new bitrise
+ current_dir=$(pwd)
+ current_bitrise_init=$current_dir/_tmp/ci-bin
+ go build -o $current_bitrise_init
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/fastlane.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
+ envman add --key CURRENT_BITRISE_INIT --value $current_bitrise_init
- script:
- title: "config test"
+ title: Run integration tests
inputs:
- content: |-
#!/bin/bash
- set -x
+ echo "Running integration tests ..."
+ set -ex
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
+ export INTEGRATION_TEST_BINARY_PATH="$CURRENT_BITRISE_INIT"
+ go test ./_tests/integration/...
- _test_sample-apps-xamarin-ios:
+ # ----------------------------------------------------------------
+ # --- workflows for Utility
+ _prepare_and_setup:
+ title: Prepare bitrise and install testing tools
+ description: |
+ Prepares the environment for testing
steps:
- script:
- title: "Scanner test: sample-apps-xamarin-ios"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
-
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-xamarin-ios.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
+ title: Install testing tools
+ run_if: ".IsCI"
inputs:
- content: |-
#!/bin/bash
+ set -e
set -x
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
-
- _test_android-non-executable-gradlew:
- steps:
- - script:
- title: "Scanner test: android-non-executable-gradlew"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
+ # Install dependencies
+ go get -u github.com/tools/godep
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-samples/android-non-executable-gradlew.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
+ # Check for unhandled errors
+ go get -u github.com/kisielk/errcheck
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
+ # Go lint
+ go get -u github.com/golang/lint/golint
- _test_sample-apps-xamarin-android:
+ godeps-update:
+ title: Godeps update
+ description: |
+ Used for updating bitrise dependencies with godep
steps:
- script:
- title: "Scanner test: sample-apps-xamarin-android"
+ title: Dependency update
inputs:
- - content: |-
+ - content: |
#!/bin/bash
- set -x
+ set -ex
+ go get -u -v github.com/tools/godep
- rm -rf ./_tmp
- SAMPLE_APP_REPOSITORY_URL=https://github.com/bitrise-io/sample-apps-xamarin-android.git
- git clone $SAMPLE_APP_REPOSITORY_URL ./_tmp/sample-repo
- $SCANNER_BIN config --dir ./_tmp/sample-repo --output-dir ./_tmp/output
- - change-workdir:
- title: Switch working dir
- inputs:
- - path: ./_tmp/sample-repo
- - is_create_path: true
- - script:
- title: "config test"
- inputs:
- - content: |-
- #!/bin/bash
- set -x
+ rm -rf ./Godeps
+ rm -rf ./vendor
- config=$(find ./../output -name "*.yml" -print -quit)
- bitrise run primary --config $config
+ go get -t -d ./...
+ go get golang.org/x/sys/unix
+ go get github.com/davecgh/go-spew/spew
+ go get github.com/pmezard/go-difflib/difflib
+ go get github.com/ryanuber/go-glob
+ godep save ./...
# ----------------------------------------------------------------
# --- workflows for Releasing
@@ -550,92 +209,4 @@ workflows:
envman add --key LINUX_DEPLOY_PATH --value $DEPLOY_PATH
cp $DEPLOY_PATH $BITRISE_DEPLOY_DIR/$BIN_NAME-$OS-$ARCH
- echo " Copy final Linux binary to: $BITRISE_DEPLOY_DIR/$BIN_NAME-$OS-$ARCH"
-
- # ----------------------------------------------------------------
- # --- workflows for Utility
- _prepare_and_setup:
- title: Prepare bitrise and install testing tools
- description: |
- Prepares the environment for testing
- steps:
- - script:
- title: Install testing tools
- run_if: ".IsCI"
- inputs:
- - content: |-
- #!/bin/bash
- set -e
- set -x
-
- # Install dependencies
- go get -u github.com/tools/godep
-
- # Check for unhandled errors
- go get -u github.com/kisielk/errcheck
-
- # Go lint
- go get -u github.com/golang/lint/golint
-
- _install_init_tool:
- title: Installs bitrise-init tool
- description: |
- Installs bitrise-init tool
- steps:
- - script:
- inputs:
- - content: |-
- #!/bin/bash
-
- #
- # Create scanner bin
- echo "Create scanner bin..."
-
- export GO15VENDOREXPERIMENT="1"
- export GOPATH="$GOPATH:$THIS_SCRIPTDIR/go/"
- export ARCH=x86_64
- export GOARCH=amd64
-
- current_os=$(uname -s)
- if [[ "$current_os" == "Darwin" ]] ; then
- export GOOS=darwin
- elif [[ "$current_os" == "Linux" ]]; then
- export GOOS=linux
- else
- echo "step runs on unsupported os: $current_os"
- exit 1
- fi
-
- tmp_dir=$(mktemp -d)
- current_dir=$(pwd)
- bin_pth="$tmp_dir/scanner"
-
- go build -o "$bin_pth"
-
- echo "ceated at: ${bin_pth}"
- envman add --key SCANNER_BIN --value $bin_pth
-
- godeps-update:
- title: Godeps update
- description: |
- Used for updating bitrise dependencies with godep
- before_run:
- - _prepare_and_setup
- steps:
- - script:
- title: Dependency update
- inputs:
- - content: |
- #!/bin/bash
- set -ex
- go get -u -v github.com/tools/godep
-
- rm -rf ./Godeps
- rm -rf ./vendor
-
- go get -t -d ./...
- go get golang.org/x/sys/unix
- go get github.com/davecgh/go-spew/spew
- go get github.com/pmezard/go-difflib/difflib
- go get github.com/ryanuber/go-glob
- godep save ./...
+ echo " Copy final Linux binary to: $BITRISE_DEPLOY_DIR/$BIN_NAME-$OS-$ARCH"
\ No newline at end of file
diff --git a/go/src/github.com/bitrise-core/bitrise-init/cli/cli.go b/go/src/github.com/bitrise-core/bitrise-init/cli/cli.go
index 1a8a154c..d0f663f5 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/cli/cli.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/cli/cli.go
@@ -7,62 +7,21 @@ import (
log "github.com/Sirupsen/logrus"
"github.com/bitrise-core/bitrise-init/version"
- "github.com/bitrise-io/go-utils/colorstring"
"github.com/urfave/cli"
)
-//=======================================
-// Functions
-//=======================================
-
-// PrintHeader ...
-func PrintHeader(c *cli.Context) {
- fmt.Println()
- log.Info(colorstring.Greenf("Running %s v%s", c.App.Name, c.App.Version))
- fmt.Println()
-}
-
-func printVersion(c *cli.Context) {
- fmt.Fprintf(c.App.Writer, "%v\n", c.App.Version)
-}
-
-func before(c *cli.Context) error {
- log.SetFormatter(&log.TextFormatter{
- FullTimestamp: true,
- ForceColors: true,
- TimestampFormat: "15:04:05",
- })
-
- // Log level
- logLevelStr := c.String("loglevel")
- if logLevelStr == "" {
- logLevelStr = "info"
- }
-
- level, err := log.ParseLevel(logLevelStr)
- if err != nil {
- return err
- }
- log.SetLevel(level)
-
- return nil
-}
-
-//=======================================
-// Main
-//=======================================
-
// Run ...
func Run() {
// Parse cl
- cli.VersionPrinter = printVersion
+ cli.VersionPrinter = func(c *cli.Context) {
+ fmt.Fprintf(c.App.Writer, "%v\n", c.App.Version)
+ }
app := cli.NewApp()
app.Name = path.Base(os.Args[0])
app.Usage = "Bitrise Init Tool"
app.Version = version.VERSION
-
app.Author = ""
app.Email = ""
@@ -78,63 +37,33 @@ func Run() {
EnvVar: "CI",
},
}
- app.Before = before
+
+ app.Before = func(c *cli.Context) error {
+ log.SetFormatter(&log.TextFormatter{
+ FullTimestamp: true,
+ ForceColors: true,
+ TimestampFormat: "15:04:05",
+ })
+
+ // Log level
+ logLevelStr := c.String("loglevel")
+ if logLevelStr == "" {
+ logLevelStr = "info"
+ }
+
+ level, err := log.ParseLevel(logLevelStr)
+ if err != nil {
+ return err
+ }
+ log.SetLevel(level)
+
+ return nil
+ }
+
app.Commands = []cli.Command{
- {
- Name: "version",
- Usage: "Prints the version",
- Action: printVersionCmd,
- Flags: []cli.Flag{
- cli.StringFlag{
- Name: "format",
- Usage: "Output format, options [raw, json, yaml].",
- Value: "raw",
- },
- cli.BoolFlag{
- Name: "full",
- Usage: "Prints the build number as well.",
- },
- },
- },
- cli.Command{
- Name: "config",
- Usage: "Generates a bitrise config files based on your project.",
- Action: initConfig,
- Flags: []cli.Flag{
- cli.StringFlag{
- Name: "dir",
- Usage: "Directory to scan.",
- Value: "./",
- },
- cli.StringFlag{
- Name: "output-dir",
- Usage: "Directory to save scan results.",
- Value: "./_scan_result",
- },
- cli.StringFlag{
- Name: "format",
- Usage: "Output format, options [json, yaml].",
- Value: "yaml",
- },
- },
- },
- cli.Command{
- Name: "manual-config",
- Usage: "Generates default bitrise config files.",
- Action: initManualConfig,
- Flags: []cli.Flag{
- cli.StringFlag{
- Name: "output-dir",
- Usage: "Directory to save scan results.",
- Value: "./_defaults",
- },
- cli.StringFlag{
- Name: "format",
- Usage: "Output format, options [json, yaml].",
- Value: "yaml",
- },
- },
- },
+ versionCommand,
+ configCommand,
+ manualConfigCommand,
}
if err := app.Run(os.Args); err != nil {
diff --git a/go/src/github.com/bitrise-core/bitrise-init/cli/config.go b/go/src/github.com/bitrise-core/bitrise-init/cli/config.go
new file mode 100644
index 00000000..b815f879
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/cli/config.go
@@ -0,0 +1,161 @@
+package cli
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+
+ log "github.com/Sirupsen/logrus"
+ "github.com/bitrise-core/bitrise-init/output"
+ "github.com/bitrise-core/bitrise-init/scanner"
+ "github.com/bitrise-io/go-utils/colorstring"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/urfave/cli"
+)
+
+const (
+ defaultScanResultDir = "_scan_result"
+)
+
+var configCommand = cli.Command{
+	Name:  "config",
+	Name:  "config",
+	Usage: "Generates a bitrise config file based on your project.",
+ Action: func(c *cli.Context) error {
+ if err := initConfig(c); err != nil {
+ log.Fatal(err)
+ }
+ return nil
+ },
+ Flags: []cli.Flag{
+ cli.StringFlag{
+ Name: "dir",
+ Usage: "Directory to scan.",
+ Value: "./",
+ },
+ cli.StringFlag{
+ Name: "output-dir",
+ Usage: "Directory to save scan results.",
+ Value: "./_scan_result",
+ },
+ cli.StringFlag{
+ Name: "format",
+ Usage: "Output format, options [json, yaml].",
+ Value: "yaml",
+ },
+ },
+}
+
+func initConfig(c *cli.Context) error {
+ // Config
+ isCI := c.GlobalBool("ci")
+ searchDir := c.String("dir")
+ outputDir := c.String("output-dir")
+ formatStr := c.String("format")
+
+ if isCI {
+ log.Info(colorstring.Yellow("CI mode"))
+ }
+ log.Info(colorstring.Yellowf("scan dir: %s", searchDir))
+ log.Info(colorstring.Yellowf("output dir: %s", outputDir))
+ log.Info(colorstring.Yellowf("output format: %s", formatStr))
+ fmt.Println()
+
+ currentDir, err := pathutil.AbsPath("./")
+ if err != nil {
+ return fmt.Errorf("Failed to expand path (%s), error: %s", outputDir, err)
+ }
+
+ if searchDir == "" {
+ searchDir = currentDir
+ }
+ searchDir, err = pathutil.AbsPath(searchDir)
+ if err != nil {
+ return fmt.Errorf("Failed to expand path (%s), error: %s", outputDir, err)
+ }
+
+ if outputDir == "" {
+ outputDir = filepath.Join(currentDir, defaultScanResultDir)
+ }
+ outputDir, err = pathutil.AbsPath(outputDir)
+ if err != nil {
+ return fmt.Errorf("Failed to expand path (%s), error: %s", outputDir, err)
+ }
+
+ if formatStr == "" {
+ formatStr = output.YAMLFormat.String()
+ }
+ format, err := output.ParseFormat(formatStr)
+ if err != nil {
+ return fmt.Errorf("Failed to parse format (%s), error: %s", formatStr, err)
+ }
+ if format != output.JSONFormat && format != output.YAMLFormat {
+ return fmt.Errorf("Not allowed output format (%s), options: [%s, %s]", format.String(), output.YAMLFormat.String(), output.JSONFormat.String())
+ }
+ // ---
+
+ scanResult, err := scanner.Config(searchDir)
+ if err != nil {
+ return err
+ }
+
+ platforms := []string{}
+ for platform := range scanResult.OptionsMap {
+ platforms = append(platforms, platform)
+ }
+
+ if len(platforms) == 0 {
+ return errors.New("No known platform detected")
+ }
+
+ // Write output to files
+ if isCI {
+ log.Infof(colorstring.Blue("Saving outputs:"))
+
+ if exist, err := pathutil.IsDirExists(outputDir); err != nil {
+ return err
+ } else if !exist {
+ if err := os.MkdirAll(outputDir, 0700); err != nil {
+ return fmt.Errorf("Failed to create (%s), error: %s", outputDir, err)
+ }
+ }
+
+ pth := path.Join(outputDir, "result")
+ outputPth, err := output.WriteToFile(scanResult, format, pth)
+ if err != nil {
+ return fmt.Errorf("Failed to print result, error: %s", err)
+ }
+ log.Infof(" scan result: %s", colorstring.Blue(outputPth))
+
+ return nil
+ }
+ // ---
+
+ // Select option
+ log.Infof(colorstring.Blue("Collecting inputs:"))
+
+ config, err := scanner.AskForConfig(scanResult)
+ if err != nil {
+ return err
+ }
+
+ if exist, err := pathutil.IsDirExists(outputDir); err != nil {
+ return err
+ } else if !exist {
+ if err := os.MkdirAll(outputDir, 0700); err != nil {
+ return fmt.Errorf("Failed to create (%s), error: %s", outputDir, err)
+ }
+ }
+
+ pth := path.Join(outputDir, "bitrise.yml")
+ outputPth, err := output.WriteToFile(config, format, pth)
+ if err != nil {
+ return fmt.Errorf("Failed to print result, error: %s", err)
+ }
+ log.Infof(" bitrise.yml template: %s", colorstring.Blue(outputPth))
+ fmt.Println()
+ // ---
+
+ return nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/cli/init_config.go b/go/src/github.com/bitrise-core/bitrise-init/cli/init_config.go
deleted file mode 100644
index 09e758f4..00000000
--- a/go/src/github.com/bitrise-core/bitrise-init/cli/init_config.go
+++ /dev/null
@@ -1,319 +0,0 @@
-package cli
-
-import (
- "fmt"
- "os"
- "path"
- "path/filepath"
-
- "gopkg.in/yaml.v2"
-
- log "github.com/Sirupsen/logrus"
- "github.com/bitrise-core/bitrise-init/models"
- "github.com/bitrise-core/bitrise-init/output"
- "github.com/bitrise-core/bitrise-init/scanners"
- "github.com/bitrise-core/bitrise-init/scanners/android"
- "github.com/bitrise-core/bitrise-init/scanners/fastlane"
- "github.com/bitrise-core/bitrise-init/scanners/ios"
- "github.com/bitrise-core/bitrise-init/scanners/xamarin"
- bitriseModels "github.com/bitrise-io/bitrise/models"
- envmanModels "github.com/bitrise-io/envman/models"
- "github.com/bitrise-io/go-utils/colorstring"
- "github.com/bitrise-io/go-utils/pathutil"
- "github.com/bitrise-io/goinp/goinp"
- "github.com/urfave/cli"
-)
-
-const (
- defaultScanResultDir = "_scan_result"
-)
-
-func askForValue(option models.OptionModel) (string, string, error) {
- optionValues := option.GetValues()
-
- selectedValue := ""
- if len(optionValues) == 1 {
- selectedValue = optionValues[0]
- } else {
- question := fmt.Sprintf("Select: %s", option.Title)
- answer, err := goinp.SelectFromStrings(question, optionValues)
- if err != nil {
- return "", "", err
- }
-
- selectedValue = answer
- }
-
- return option.EnvKey, selectedValue, nil
-}
-
-func initConfig(c *cli.Context) error {
- PrintHeader(c)
-
- //
- // Config
- isCI := c.GlobalBool("ci")
- searchDir := c.String("dir")
- outputDir := c.String("output-dir")
- formatStr := c.String("format")
-
- currentDir, err := pathutil.AbsPath("./")
- if err != nil {
- return fmt.Errorf("Failed to expand path (%s), error: %s", outputDir, err)
- }
-
- if searchDir == "" {
- searchDir = currentDir
- }
- searchDir, err = pathutil.AbsPath(searchDir)
- if err != nil {
- return fmt.Errorf("Failed to expand path (%s), error: %s", outputDir, err)
- }
-
- if outputDir == "" {
- outputDir = filepath.Join(currentDir, defaultScanResultDir)
- }
- outputDir, err = pathutil.AbsPath(outputDir)
- if err != nil {
- return fmt.Errorf("Failed to expand path (%s), error: %s", outputDir, err)
- }
-
- if formatStr == "" {
- formatStr = output.YAMLFormat.String()
- }
- format, err := output.ParseFormat(formatStr)
- if err != nil {
- return fmt.Errorf("Failed to parse format (%s), error: %s", formatStr, err)
- }
- if format != output.JSONFormat && format != output.YAMLFormat {
- return fmt.Errorf("Not allowed output format (%s), options: [%s, %s]", format.String(), output.YAMLFormat.String(), output.JSONFormat.String())
- }
-
- if isCI {
- log.Info(colorstring.Yellow("CI mode"))
- }
- log.Info(colorstring.Yellowf("scan dir: %s", searchDir))
- log.Info(colorstring.Yellowf("output dir: %s", outputDir))
- log.Info(colorstring.Yellowf("output format: %s", format))
- fmt.Println()
-
- if searchDir != currentDir {
- log.Infof("Change work dir to (%s)", searchDir)
- fmt.Println()
- if err := os.Chdir(searchDir); err != nil {
- return fmt.Errorf("Failed to change dir, to (%s), error: %s", searchDir, err)
- }
- defer func() {
- fmt.Println()
- log.Infof("Change work dir to (%s)", currentDir)
- fmt.Println()
- if err := os.Chdir(currentDir); err != nil {
- log.Warnf("Failed to change dir, to (%s), error: %s", searchDir, err)
- }
- }()
- }
-
- //
- // Scan
- projectScanners := []scanners.ScannerInterface{
- new(android.Scanner),
- new(xamarin.Scanner),
- new(ios.Scanner),
- new(fastlane.Scanner),
- }
-
- projectTypeWarningMap := map[string]models.Warnings{}
- projectTypeOptionMap := map[string]models.OptionModel{}
- projectTypeConfigMap := map[string]models.BitriseConfigMap{}
-
- log.Infof(colorstring.Blue("Running scanners:"))
- fmt.Println()
-
- for _, detector := range projectScanners {
- detectorName := detector.Name()
- log.Infof("Scanner: %s", colorstring.Blue(detectorName))
-
- log.Info("+------------------------------------------------------------------------------+")
- log.Info("| |")
-
- detectorWarnings := []string{}
- detector.Configure(searchDir)
- detected, err := detector.DetectPlatform()
- if err != nil {
- log.Errorf("Scanner failed, error: %s", err)
- detectorWarnings = append(detectorWarnings, err.Error())
- projectTypeWarningMap[detectorName] = detectorWarnings
- detected = false
- }
-
- if !detected {
- log.Info("| |")
- log.Info("+------------------------------------------------------------------------------+")
- fmt.Println()
- continue
- }
-
- option, projectWarnings, err := detector.Options()
- detectorWarnings = append(detectorWarnings, projectWarnings...)
-
- if err != nil {
- log.Errorf("Analyzer failed, error: %s", err)
- detectorWarnings = append(detectorWarnings, err.Error())
- projectTypeWarningMap[detectorName] = detectorWarnings
- continue
- }
-
- projectTypeWarningMap[detectorName] = detectorWarnings
-
- log.Debug()
- log.Debug("Analyze result:")
- bytes, err := yaml.Marshal(option)
- if err != nil {
- return fmt.Errorf("Failed to marshal option, error: %s", err)
- }
- log.Debugf("\n%v", string(bytes))
-
- projectTypeOptionMap[detectorName] = option
-
- // Generate configs
- log.Debug()
- log.Debug("Generated configs:")
- configs, err := detector.Configs()
- if err != nil {
- return fmt.Errorf("Failed create configs, error: %s", err)
- }
-
- for name, config := range configs {
- log.Debugf(" name: %s", name)
-
- bytes, err := yaml.Marshal(config)
- if err != nil {
- return fmt.Errorf("Failed to marshal option, error: %s", err)
- }
- log.Debugf("\n%v", string(bytes))
- }
-
- projectTypeConfigMap[detectorName] = configs
-
- log.Info("| |")
- log.Info("+------------------------------------------------------------------------------+")
- fmt.Println()
- }
-
- //
- // Write output to files
- if isCI {
- log.Infof(colorstring.Blue("Saving outputs:"))
-
- scanResult := models.ScanResultModel{
- OptionsMap: projectTypeOptionMap,
- ConfigsMap: projectTypeConfigMap,
- WarningsMap: projectTypeWarningMap,
- }
-
- if err := os.MkdirAll(outputDir, 0700); err != nil {
- return fmt.Errorf("Failed to create (%s), error: %s", outputDir, err)
- }
-
- pth := path.Join(outputDir, "result")
- outputPth, err := output.WriteToFile(scanResult, format, pth)
- if err != nil {
- return fmt.Errorf("Failed to print result, error: %s", err)
- }
- log.Infof(" scan result: %s", colorstring.Blue(outputPth))
-
- return nil
- }
-
- //
- // Select option
- log.Infof(colorstring.Blue("Collecting inputs:"))
-
- for detectorName, option := range projectTypeOptionMap {
- log.Infof(" Scanner: %s", colorstring.Blue(detectorName))
-
- // Init
- platformOutputDir := path.Join(outputDir, detectorName)
- if exist, err := pathutil.IsDirExists(platformOutputDir); err != nil {
- return fmt.Errorf("Failed to check if path (%s) exis, error: %s", platformOutputDir, err)
- } else if exist {
- if err := os.RemoveAll(platformOutputDir); err != nil {
- return fmt.Errorf("Failed to cleanup (%s), error: %s", platformOutputDir, err)
- }
- }
-
- if err := os.MkdirAll(platformOutputDir, 0700); err != nil {
- return fmt.Errorf("Failed to create (%s), error: %s", platformOutputDir, err)
- }
-
- // Collect inputs
- configPth := ""
- appEnvs := []envmanModels.EnvironmentItemModel{}
-
- var walkDepth func(option models.OptionModel) error
-
- walkDepth = func(option models.OptionModel) error {
- optionEnvKey, selectedValue, err := askForValue(option)
- if err != nil {
- return fmt.Errorf("Failed to ask for vale, error: %s", err)
- }
-
- if optionEnvKey == "" {
- configPth = selectedValue
- } else {
- appEnvs = append(appEnvs, envmanModels.EnvironmentItemModel{
- optionEnvKey: selectedValue,
- })
- }
-
- nestedOptions, found := option.ValueMap[selectedValue]
- if !found {
- return nil
- }
-
- return walkDepth(nestedOptions)
- }
-
- if err := walkDepth(option); err != nil {
- return err
- }
-
- log.Debug()
- log.Debug("Selected app envs:")
- aBytes, err := yaml.Marshal(appEnvs)
- if err != nil {
- return fmt.Errorf("Failed to marshal appEnvs, error: %s", err)
- }
- log.Debugf("\n%v", string(aBytes))
-
- configMap := projectTypeConfigMap[detectorName]
- configStr := configMap[configPth]
-
- var config bitriseModels.BitriseDataModel
- if err := yaml.Unmarshal([]byte(configStr), &config); err != nil {
- return fmt.Errorf("Failed to unmarshal config, error: %s", err)
- }
-
- config.App.Environments = append(config.App.Environments, appEnvs...)
-
- log.Debug()
- log.Debug("Config:")
- log.Debugf(" name: %s", configPth)
- aBytes, err = yaml.Marshal(config)
- if err != nil {
- return fmt.Errorf("Failed to marshal config, error: %s", err)
- }
- log.Debugf("\n%v", string(aBytes))
-
- // Write config to file
- pth := path.Join(platformOutputDir, configPth)
- outputPth, err := output.WriteToFile(config, format, pth)
- if err != nil {
- return fmt.Errorf("Failed to print result, error: %s", err)
- }
- log.Infof(" bitrise.yml template: %s", colorstring.Blue(outputPth))
- fmt.Println()
- }
-
- return nil
-}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/cli/init_manual_config.go b/go/src/github.com/bitrise-core/bitrise-init/cli/init_manual_config.go
deleted file mode 100644
index f647cc20..00000000
--- a/go/src/github.com/bitrise-core/bitrise-init/cli/init_manual_config.go
+++ /dev/null
@@ -1,238 +0,0 @@
-package cli
-
-import (
- "fmt"
- "os"
- "path"
- "path/filepath"
-
- "gopkg.in/yaml.v2"
-
- log "github.com/Sirupsen/logrus"
- "github.com/bitrise-core/bitrise-init/models"
- "github.com/bitrise-core/bitrise-init/output"
- "github.com/bitrise-core/bitrise-init/scanners"
- "github.com/bitrise-core/bitrise-init/scanners/android"
- "github.com/bitrise-core/bitrise-init/scanners/fastlane"
- "github.com/bitrise-core/bitrise-init/scanners/ios"
- "github.com/bitrise-core/bitrise-init/scanners/xamarin"
- bitriseModels "github.com/bitrise-io/bitrise/models"
- envmanModels "github.com/bitrise-io/envman/models"
- "github.com/bitrise-io/go-utils/colorstring"
- "github.com/bitrise-io/go-utils/pathutil"
- "github.com/urfave/cli"
-)
-
-const (
- defaultOutputDir = "_defaults"
-)
-
-func initManualConfig(c *cli.Context) error {
- PrintHeader(c)
-
- //
- // Config
- isCI := c.GlobalBool("ci")
- outputDir := c.String("output-dir")
- formatStr := c.String("format")
-
- currentDir, err := pathutil.AbsPath("./")
- if err != nil {
- return fmt.Errorf("Failed to get current directory, error: %s", err)
- }
-
- if outputDir == "" {
- outputDir = filepath.Join(currentDir, defaultOutputDir)
- }
- outputDir, err = pathutil.AbsPath(outputDir)
- if err != nil {
- return fmt.Errorf("Failed to get abs path (%s), error: %s", outputDir, err)
- }
-
- if formatStr == "" {
- formatStr = output.YAMLFormat.String()
- }
- format, err := output.ParseFormat(formatStr)
- if err != nil {
- return fmt.Errorf("Failed to parse format, err: %s", err)
- }
- if format != output.JSONFormat && format != output.YAMLFormat {
- return fmt.Errorf("Not allowed output format (%v), options: [%s, %s]", format, output.YAMLFormat.String(), output.JSONFormat.String())
- }
-
- if isCI {
- log.Info(colorstring.Yellow("CI mode"))
- }
- log.Info(colorstring.Yellowf("output dir: %s", outputDir))
- log.Info(colorstring.Yellowf("output format: %s", format))
- fmt.Println()
-
- //
- // Scan
- projectScanners := []scanners.ScannerInterface{
- new(android.Scanner),
- new(xamarin.Scanner),
- new(ios.Scanner),
- new(fastlane.Scanner),
- }
-
- projectTypeOptionMap := map[string]models.OptionModel{}
- projectTypeConfigMap := map[string]models.BitriseConfigMap{}
-
- for _, detector := range projectScanners {
- detectorName := detector.Name()
-
- option := detector.DefaultOptions()
-
- log.Debug()
- log.Debug("Analyze result:")
- bytes, err := yaml.Marshal(option)
- if err != nil {
- return fmt.Errorf("Failed to marshal option, err: %s", err)
- }
- log.Debugf("\n%v", string(bytes))
-
- projectTypeOptionMap[detectorName] = option
-
- configs, err := detector.DefaultConfigs()
- if err != nil {
- return fmt.Errorf("Failed create default configs, error: %s", err)
- }
-
- for name, config := range configs {
- log.Debugf(" name: %s", name)
-
- bytes, err := yaml.Marshal(config)
- if err != nil {
- return fmt.Errorf("Failed to marshal option, err: %s", err)
- }
- log.Debugf("\n%v", string(bytes))
- }
-
- projectTypeConfigMap[detectorName] = configs
- }
-
- customConfigs, err := scanners.CustomConfig()
- if err != nil {
- return fmt.Errorf("Failed create default custom configs, error: %s", err)
- }
-
- projectTypeConfigMap["custom"] = customConfigs
-
- //
- // Write output to files
- if isCI {
- log.Infof(colorstring.Blue("Saving outputs:"))
-
- scanResult := models.ScanResultModel{
- OptionsMap: projectTypeOptionMap,
- ConfigsMap: projectTypeConfigMap,
- }
-
- if err := os.MkdirAll(outputDir, 0700); err != nil {
- return fmt.Errorf("Failed to create (%s), err: %s", outputDir, err)
- }
-
- pth := path.Join(outputDir, "result")
- outputPth, err := output.WriteToFile(scanResult, format, pth)
-
- if err != nil {
- return fmt.Errorf("Failed to print result, error: %s", err)
- }
- log.Infof(" scan result: %s", colorstring.Blue(outputPth))
-
- return nil
- }
-
- //
- // Select option
- log.Infof(colorstring.Blue("Collecting inputs:"))
-
- for detectorName, option := range projectTypeOptionMap {
- log.Infof(" Scanner: %s", colorstring.Blue(detectorName))
-
- // Init
- platformOutputDir := path.Join(outputDir, detectorName)
- if exist, err := pathutil.IsDirExists(platformOutputDir); err != nil {
- return fmt.Errorf("Failed to check if path (%s) exis, error: %s", platformOutputDir, err)
- } else if exist {
- if err := os.RemoveAll(platformOutputDir); err != nil {
- return fmt.Errorf("Failed to cleanup (%s), error: %s", platformOutputDir, err)
- }
- }
-
- if err := os.MkdirAll(platformOutputDir, 0700); err != nil {
- return fmt.Errorf("Failed to create (%s), error: %s", platformOutputDir, err)
- }
-
- // Collect inputs
- configPth := ""
- appEnvs := []envmanModels.EnvironmentItemModel{}
-
- var walkDepth func(option models.OptionModel) error
-
- walkDepth = func(option models.OptionModel) error {
- optionEnvKey, selectedValue, err := askForValue(option)
- if err != nil {
- return fmt.Errorf("Failed to ask for vale, error: %s", err)
- }
-
- if optionEnvKey == "" {
- configPth = selectedValue
- } else {
- appEnvs = append(appEnvs, envmanModels.EnvironmentItemModel{
- optionEnvKey: selectedValue,
- })
- }
-
- nestedOptions, found := option.ValueMap[selectedValue]
- if !found {
- return err
- }
-
- return walkDepth(nestedOptions)
- }
-
- if err := walkDepth(option); err != nil {
- return err
- }
-
- log.Debug()
- log.Debug("Selected app envs:")
- aBytes, err := yaml.Marshal(appEnvs)
- if err != nil {
- return fmt.Errorf("Failed to marshal appEnvs, error: %s", err)
- }
- log.Debugf("\n%v", string(aBytes))
-
- configMap := projectTypeConfigMap[detectorName]
- configStr := configMap[configPth]
-
- var config bitriseModels.BitriseDataModel
- if err := yaml.Unmarshal([]byte(configStr), &config); err != nil {
- return fmt.Errorf("Failed to unmarshal config, error: %s", err)
- }
-
- config.App.Environments = append(config.App.Environments, appEnvs...)
-
- log.Debug()
- log.Debug("Config:")
- log.Debugf(" name: %s", configPth)
- aBytes, err = yaml.Marshal(config)
- if err != nil {
- return fmt.Errorf("Failed to marshal config, error: %s", err)
- }
- log.Debugf("\n%v", string(aBytes))
-
- // Write config to file
- pth := path.Join(platformOutputDir, configPth)
- outputPth, err := output.WriteToFile(config, format, pth)
- if err != nil {
- return fmt.Errorf("Failed to print result, error: %s", err)
- }
- log.Infof(" bitrise.yml template: %s", colorstring.Blue(outputPth))
- fmt.Println()
- }
-
- return nil
-}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/cli/manual_config.go b/go/src/github.com/bitrise-core/bitrise-init/cli/manual_config.go
new file mode 100644
index 00000000..ead840ad
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/cli/manual_config.go
@@ -0,0 +1,124 @@
+package cli
+
+import (
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+
+ log "github.com/Sirupsen/logrus"
+ "github.com/bitrise-core/bitrise-init/output"
+ "github.com/bitrise-core/bitrise-init/scanner"
+ "github.com/bitrise-io/go-utils/colorstring"
+ "github.com/bitrise-io/go-utils/pathutil"
+ "github.com/urfave/cli"
+)
+
+const (
+ defaultOutputDir = "_defaults"
+)
+
+var manualConfigCommand = cli.Command{
+ Name: "manual-config",
+ Usage: "Generates default bitrise config files.",
+ Action: func(c *cli.Context) error {
+ if err := initManualConfig(c); err != nil {
+ log.Fatal(err)
+ }
+ return nil
+ },
+ Flags: []cli.Flag{
+ cli.StringFlag{
+ Name: "output-dir",
+ Usage: "Directory to save scan results.",
+ Value: "./_defaults",
+ },
+ cli.StringFlag{
+ Name: "format",
+ Usage: "Output format, options [json, yaml].",
+ Value: "yaml",
+ },
+ },
+}
+
+func initManualConfig(c *cli.Context) error {
+ // Config
+ isCI := c.GlobalBool("ci")
+ outputDir := c.String("output-dir")
+ formatStr := c.String("format")
+
+ if isCI {
+ log.Info(colorstring.Yellow("CI mode"))
+ }
+ log.Info(colorstring.Yellowf("output dir: %s", outputDir))
+ log.Info(colorstring.Yellowf("output format: %s", formatStr))
+ fmt.Println()
+
+ currentDir, err := pathutil.AbsPath("./")
+ if err != nil {
+ return fmt.Errorf("Failed to get current directory, error: %s", err)
+ }
+
+ if outputDir == "" {
+ outputDir = filepath.Join(currentDir, defaultOutputDir)
+ }
+ outputDir, err = pathutil.AbsPath(outputDir)
+ if err != nil {
+ return fmt.Errorf("Failed to get abs path (%s), error: %s", outputDir, err)
+ }
+
+ if formatStr == "" {
+ formatStr = output.YAMLFormat.String()
+ }
+ format, err := output.ParseFormat(formatStr)
+ if err != nil {
+ return fmt.Errorf("Failed to parse format, err: %s", err)
+ }
+ if format != output.JSONFormat && format != output.YAMLFormat {
+ return fmt.Errorf("Not allowed output format (%v), options: [%s, %s]", format, output.YAMLFormat.String(), output.JSONFormat.String())
+ }
+ // ---
+
+ scanResult, err := scanner.ManualConfig()
+ if err != nil {
+ return err
+ }
+
+ // Write output to files
+ if isCI {
+ log.Infof(colorstring.Blue("Saving outputs:"))
+
+ if err := os.MkdirAll(outputDir, 0700); err != nil {
+ return fmt.Errorf("Failed to create (%s), error: %s", outputDir, err)
+ }
+
+ pth := path.Join(outputDir, "result")
+ outputPth, err := output.WriteToFile(scanResult, format, pth)
+ if err != nil {
+ return fmt.Errorf("Failed to print result, error: %s", err)
+ }
+ log.Infof(" scan result: %s", colorstring.Blue(outputPth))
+
+ return nil
+ }
+ // ---
+
+ // Select option
+ log.Infof(colorstring.Blue("Collecting inputs:"))
+
+ config, err := scanner.AskForConfig(scanResult)
+ if err != nil {
+ return err
+ }
+
+ pth := path.Join(outputDir, "bitrise.yml")
+ outputPth, err := output.WriteToFile(config, format, pth)
+ if err != nil {
+ return fmt.Errorf("Failed to print result, error: %s", err)
+ }
+ log.Infof(" bitrise.yml template: %s", colorstring.Blue(outputPth))
+ fmt.Println()
+ // ---
+
+ return nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/cli/version.go b/go/src/github.com/bitrise-core/bitrise-init/cli/version.go
index a502c769..fd66658a 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/cli/version.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/cli/version.go
@@ -3,6 +3,7 @@ package cli
import (
"fmt"
+ log "github.com/Sirupsen/logrus"
"github.com/bitrise-core/bitrise-init/output"
"github.com/bitrise-core/bitrise-init/version"
"github.com/urfave/cli"
@@ -15,7 +16,29 @@ type VersionOutputModel struct {
Commit string `json:"commit" yaml:"commit"`
}
-func printVersionCmd(c *cli.Context) error {
+var versionCommand = cli.Command{
+ Name: "version",
+ Usage: "Prints the version",
+ Action: func(c *cli.Context) error {
+ if err := printVersion(c); err != nil {
+ log.Fatal(err)
+ }
+ return nil
+ },
+ Flags: []cli.Flag{
+ cli.StringFlag{
+ Name: "format",
+ Usage: "Output format, options [raw, json, yaml].",
+ Value: "raw",
+ },
+ cli.BoolFlag{
+ Name: "full",
+ Usage: "Prints the build number as well.",
+ },
+ },
+}
+
+func printVersion(c *cli.Context) error {
fullVersion := c.Bool("full")
formatStr := c.String("format")
diff --git a/go/src/github.com/bitrise-core/bitrise-init/models/models.go b/go/src/github.com/bitrise-core/bitrise-init/models/models.go
index 446f5300..9ec2d5cf 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/models/models.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/models/models.go
@@ -6,23 +6,13 @@ import (
)
const (
- primaryWorkflowID = "primary"
- deployWorkflowID = "deploy"
+ // FormatVersion ...
+ FormatVersion = "1.3.1"
+ defaultSteplibSource = "https://github.com/bitrise-io/bitrise-steplib.git"
+ primaryWorkflowID = "primary"
+ deployWorkflowID = "deploy"
)
-// Warnings ...
-type Warnings []string
-
-// ScanResultModel ...
-type ScanResultModel struct {
- OptionsMap map[string]OptionModel `json:"options,omitempty" yaml:"options,omitempty"`
- ConfigsMap map[string]BitriseConfigMap `json:"configs,omitempty" yaml:"configs,omitempty"`
- WarningsMap map[string]Warnings `json:"warnings,omitempty" yaml:"warnings,omitempty"`
-}
-
-// OptionValueMap ...
-type OptionValueMap map[string]OptionModel
-
// OptionModel ...
type OptionModel struct {
Title string `json:"title,omitempty" yaml:"title,omitempty"`
@@ -32,9 +22,22 @@ type OptionModel struct {
Config string `json:"config,omitempty" yaml:"config,omitempty"`
}
+// OptionValueMap ...
+type OptionValueMap map[string]OptionModel
+
// BitriseConfigMap ...
type BitriseConfigMap map[string]string
+// Warnings ...
+type Warnings []string
+
+// ScanResultModel ...
+type ScanResultModel struct {
+ OptionsMap map[string]OptionModel `json:"options,omitempty" yaml:"options,omitempty"`
+ ConfigsMap map[string]BitriseConfigMap `json:"configs,omitempty" yaml:"configs,omitempty"`
+ WarningsMap map[string]Warnings `json:"warnings,omitempty" yaml:"warnings,omitempty"`
+}
+
// NewOptionModel ...
func NewOptionModel(title, envKey string) OptionModel {
return OptionModel{
@@ -65,8 +68,8 @@ func (option OptionModel) GetValues() []string {
return values
}
-// BitriseDataWithDefaultTriggerMapAndAppEnvsAndPrimaryWorkflowSteps ...
-func BitriseDataWithDefaultTriggerMapAndAppEnvsAndPrimaryWorkflowSteps(appEnvs []envmanModels.EnvironmentItemModel, steps []bitriseModels.StepListItemModel) bitriseModels.BitriseDataModel {
+// BitriseDataWithCIWorkflow ...
+func BitriseDataWithCIWorkflow(appEnvs []envmanModels.EnvironmentItemModel, steps []bitriseModels.StepListItemModel) bitriseModels.BitriseDataModel {
workflows := map[string]bitriseModels.WorkflowModel{
primaryWorkflowID: bitriseModels.WorkflowModel{
Steps: steps,
@@ -86,8 +89,8 @@ func BitriseDataWithDefaultTriggerMapAndAppEnvsAndPrimaryWorkflowSteps(appEnvs [
}
bitriseData := bitriseModels.BitriseDataModel{
- FormatVersion: "1.3.1",
- DefaultStepLibSource: "https://github.com/bitrise-io/bitrise-steplib.git",
+ FormatVersion: FormatVersion,
+ DefaultStepLibSource: defaultSteplibSource,
TriggerMap: triggerMap,
Workflows: workflows,
App: app,
@@ -96,8 +99,8 @@ func BitriseDataWithDefaultTriggerMapAndAppEnvsAndPrimaryWorkflowSteps(appEnvs [
return bitriseData
}
-// DefaultBitriseConfigForIos ...
-func DefaultBitriseConfigForIos(ciSteps, deploySteps []bitriseModels.StepListItemModel) bitriseModels.BitriseDataModel {
+// BitriseDataWithCIAndCDWorkflow ...
+func BitriseDataWithCIAndCDWorkflow(appEnvs []envmanModels.EnvironmentItemModel, ciSteps, deploySteps []bitriseModels.StepListItemModel) bitriseModels.BitriseDataModel {
workflows := map[string]bitriseModels.WorkflowModel{
primaryWorkflowID: bitriseModels.WorkflowModel{
Steps: ciSteps,
@@ -118,40 +121,16 @@ func DefaultBitriseConfigForIos(ciSteps, deploySteps []bitriseModels.StepListIte
},
}
- bitriseData := bitriseModels.BitriseDataModel{
- FormatVersion: "1.3.0",
- DefaultStepLibSource: "https://github.com/bitrise-io/bitrise-steplib.git",
- TriggerMap: triggerMap,
- Workflows: workflows,
- }
-
- return bitriseData
-}
-
-// BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps ...
-func BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps(steps []bitriseModels.StepListItemModel) bitriseModels.BitriseDataModel {
- workflows := map[string]bitriseModels.WorkflowModel{
- primaryWorkflowID: bitriseModels.WorkflowModel{
- Steps: steps,
- },
- }
-
- triggerMap := []bitriseModels.TriggerMapItemModel{
- bitriseModels.TriggerMapItemModel{
- PushBranch: "*",
- WorkflowID: primaryWorkflowID,
- },
- bitriseModels.TriggerMapItemModel{
- PullRequestSourceBranch: "*",
- WorkflowID: primaryWorkflowID,
- },
+ app := bitriseModels.AppModel{
+ Environments: appEnvs,
}
bitriseData := bitriseModels.BitriseDataModel{
- FormatVersion: "1.3.0",
- DefaultStepLibSource: "https://github.com/bitrise-io/bitrise-steplib.git",
+ FormatVersion: FormatVersion,
+ DefaultStepLibSource: defaultSteplibSource,
TriggerMap: triggerMap,
Workflows: workflows,
+ App: app,
}
return bitriseData
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanner/config.go b/go/src/github.com/bitrise-core/bitrise-init/scanner/config.go
new file mode 100644
index 00000000..b1198571
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanner/config.go
@@ -0,0 +1,111 @@
+package scanner
+
+import (
+ "fmt"
+ "os"
+
+ log "github.com/Sirupsen/logrus"
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/scanners"
+ "github.com/bitrise-io/go-utils/colorstring"
+ "github.com/bitrise-io/go-utils/pathutil"
+)
+
+// Config ...
+func Config(searchDir string) (models.ScanResultModel, error) {
+
+ //
+ // Setup
+ currentDir, err := os.Getwd()
+ if err != nil {
+ return models.ScanResultModel{}, fmt.Errorf("Failed to expand current directory path, error: %s", err)
+ }
+
+ if searchDir == "" {
+ searchDir = currentDir
+ } else {
+ absSearchDir, err := pathutil.AbsPath(searchDir)
+ if err != nil {
+ return models.ScanResultModel{}, fmt.Errorf("Failed to expand path (%s), error: %s", searchDir, err)
+ }
+ searchDir = absSearchDir
+ }
+
+ if searchDir != currentDir {
+ if err := os.Chdir(searchDir); err != nil {
+ return models.ScanResultModel{}, fmt.Errorf("Failed to change dir, to (%s), error: %s", searchDir, err)
+ }
+ defer func() {
+ if err := os.Chdir(currentDir); err != nil {
+ log.Warnf("Failed to change dir, to (%s), error: %s", currentDir, err)
+ }
+ }()
+ }
+ // ---
+
+ //
+ // Scan
+ projectScanners := scanners.ActiveScanners
+ projectTypeWarningMap := map[string]models.Warnings{}
+ projectTypeOptionMap := map[string]models.OptionModel{}
+ projectTypeConfigMap := map[string]models.BitriseConfigMap{}
+
+ log.Infof(colorstring.Blue("Running scanners:"))
+ fmt.Println()
+
+ for _, detector := range projectScanners {
+ detectorName := detector.Name()
+ log.Infof("Scanner: %s", colorstring.Blue(detectorName))
+
+ log.Info("+------------------------------------------------------------------------------+")
+ log.Info("| |")
+
+ detectorWarnings := []string{}
+ detected, err := detector.DetectPlatform(searchDir)
+ if err != nil {
+ log.Errorf("Scanner failed, error: %s", err)
+ detectorWarnings = append(detectorWarnings, err.Error())
+ projectTypeWarningMap[detectorName] = detectorWarnings
+ detected = false
+ }
+
+ if !detected {
+ log.Info("| |")
+ log.Info("+------------------------------------------------------------------------------+")
+ fmt.Println()
+ continue
+ }
+
+ options, projectWarnings, err := detector.Options()
+ detectorWarnings = append(detectorWarnings, projectWarnings...)
+
+ if err != nil {
+ log.Errorf("Analyzer failed, error: %s", err)
+ detectorWarnings = append(detectorWarnings, err.Error())
+ projectTypeWarningMap[detectorName] = detectorWarnings
+ continue
+ }
+
+ projectTypeWarningMap[detectorName] = detectorWarnings
+ projectTypeOptionMap[detectorName] = options
+
+ // Generate configs
+ configs, err := detector.Configs()
+ if err != nil {
+ return models.ScanResultModel{}, fmt.Errorf("Failed create configs, error: %s", err)
+ }
+
+ projectTypeConfigMap[detectorName] = configs
+
+ log.Info("| |")
+ log.Info("+------------------------------------------------------------------------------+")
+ fmt.Println()
+ }
+ // ---
+
+ return models.ScanResultModel{
+ OptionsMap: projectTypeOptionMap,
+ ConfigsMap: projectTypeConfigMap,
+ WarningsMap: projectTypeWarningMap,
+ }, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanner/manual_config.go b/go/src/github.com/bitrise-core/bitrise-init/scanner/manual_config.go
new file mode 100644
index 00000000..90731f49
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanner/manual_config.go
@@ -0,0 +1,40 @@
+package scanner
+
+import (
+ "fmt"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/scanners"
+)
+
+// ManualConfig ...
+func ManualConfig() (models.ScanResultModel, error) {
+ projectScanners := scanners.ActiveScanners
+ projectTypeOptionMap := map[string]models.OptionModel{}
+ projectTypeConfigMap := map[string]models.BitriseConfigMap{}
+
+ for _, detector := range projectScanners {
+ detectorName := detector.Name()
+
+ option := detector.DefaultOptions()
+ projectTypeOptionMap[detectorName] = option
+
+ configs, err := detector.DefaultConfigs()
+ if err != nil {
+ return models.ScanResultModel{}, fmt.Errorf("Failed create default configs, error: %s", err)
+ }
+ projectTypeConfigMap[detectorName] = configs
+ }
+
+ customConfig, err := scanners.CustomConfig()
+ if err != nil {
+ return models.ScanResultModel{}, fmt.Errorf("Failed create default custom configs, error: %s", err)
+ }
+
+ projectTypeConfigMap["custom"] = customConfig
+
+ return models.ScanResultModel{
+ OptionsMap: projectTypeOptionMap,
+ ConfigsMap: projectTypeConfigMap,
+ }, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanner/utils.go b/go/src/github.com/bitrise-core/bitrise-init/scanner/utils.go
new file mode 100644
index 00000000..31f0e2d2
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanner/utils.go
@@ -0,0 +1,112 @@
+package scanner
+
+import (
+ "fmt"
+
+ yaml "gopkg.in/yaml.v2"
+
+ "github.com/bitrise-core/bitrise-init/models"
+ bitriseModels "github.com/bitrise-io/bitrise/models"
+ envmanModels "github.com/bitrise-io/envman/models"
+ "github.com/bitrise-io/goinp/goinp"
+)
+
+func askForOptionValue(option models.OptionModel) (string, string, error) {
+ optionValues := option.GetValues()
+
+ selectedValue := ""
+ if len(optionValues) == 1 {
+ selectedValue = optionValues[0]
+ } else {
+ question := fmt.Sprintf("Select: %s", option.Title)
+ answer, err := goinp.SelectFromStrings(question, optionValues)
+ if err != nil {
+ return "", "", err
+ }
+
+ selectedValue = answer
+ }
+
+ return option.EnvKey, selectedValue, nil
+}
+
+// AskForOptions ...
+func AskForOptions(options models.OptionModel) (string, []envmanModels.EnvironmentItemModel, error) {
+ configPth := ""
+ appEnvs := []envmanModels.EnvironmentItemModel{}
+
+ var walkDepth func(option models.OptionModel) error
+
+ walkDepth = func(option models.OptionModel) error {
+ optionEnvKey, selectedValue, err := askForOptionValue(option)
+ if err != nil {
+ return fmt.Errorf("Failed to ask for value, error: %s", err)
+ }
+
+ if optionEnvKey == "" {
+ configPth = selectedValue
+ } else {
+ appEnvs = append(appEnvs, envmanModels.EnvironmentItemModel{
+ optionEnvKey: selectedValue,
+ })
+ }
+
+ nestedOptions, found := option.ValueMap[selectedValue]
+ if !found {
+ return nil
+ }
+
+ return walkDepth(nestedOptions)
+ }
+
+ if err := walkDepth(options); err != nil {
+ return "", []envmanModels.EnvironmentItemModel{}, err
+ }
+
+ return configPth, appEnvs, nil
+}
+
+// AskForConfig ...
+func AskForConfig(scanResult models.ScanResultModel) (bitriseModels.BitriseDataModel, error) {
+
+ //
+ // Select platform
+ platforms := []string{}
+ for platform := range scanResult.OptionsMap {
+ platforms = append(platforms, platform)
+ }
+
+ platform, err := goinp.SelectFromStrings("Select platform", platforms)
+ if err != nil {
+ return bitriseModels.BitriseDataModel{}, err
+ }
+ // ---
+
+ //
+ // Select config
+ options, ok := scanResult.OptionsMap[platform]
+ if !ok {
+ return bitriseModels.BitriseDataModel{}, fmt.Errorf("invalid platform selected: %s", platform)
+ }
+
+ configPth, appEnvs, err := AskForOptions(options)
+ if err != nil {
+ return bitriseModels.BitriseDataModel{}, err
+ }
+ // --
+
+ //
+ // Build config
+ configMap := scanResult.ConfigsMap[platform]
+ configStr := configMap[configPth]
+
+ var config bitriseModels.BitriseDataModel
+ if err := yaml.Unmarshal([]byte(configStr), &config); err != nil {
+ return bitriseModels.BitriseDataModel{}, fmt.Errorf("failed to unmarshal config, error: %s", err)
+ }
+
+ config.App.Environments = append(config.App.Environments, appEnvs...)
+ // ---
+
+ return config, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanners/android/android.go b/go/src/github.com/bitrise-core/bitrise-init/scanners/android/android.go
index fa35e7b9..1f870853 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/scanners/android/android.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanners/android/android.go
@@ -49,11 +49,9 @@ set -ex
echo y | android update sdk --no-ui --all --filter platform-tools | grep 'package installed'
-echo y | android update sdk --no-ui --all --filter extra-android-support | grep 'package installed'
echo y | android update sdk --no-ui --all --filter extra-android-m2repository | grep 'package installed'
-
-echo y | android update sdk --no-ui --all --filter extra-google-google_play_services | grep 'package installed'
echo y | android update sdk --no-ui --all --filter extra-google-m2repository | grep 'package installed'
+echo y | android update sdk --no-ui --all --filter extra-google-google_play_services | grep 'package installed'
`
updateAndroidExtraPackagesScriptTite = "Update Android Extra packages"
@@ -136,7 +134,6 @@ func defaultConfigName() string {
// Scanner ...
type Scanner struct {
- SearchDir string
FileList []string
GradleFiles []string
}
@@ -146,16 +143,11 @@ func (scanner Scanner) Name() string {
return scannerName
}
-// Configure ...
-func (scanner *Scanner) Configure(searchDir string) {
- scanner.SearchDir = searchDir
-}
-
// DetectPlatform ...
-func (scanner *Scanner) DetectPlatform() (bool, error) {
- fileList, err := utility.FileList(scanner.SearchDir)
+func (scanner *Scanner) DetectPlatform(searchDir string) (bool, error) {
+ fileList, err := utility.FileList(searchDir)
if err != nil {
- return false, fmt.Errorf("failed to search for files in (%s), error: %s", scanner.SearchDir, err)
+ return false, fmt.Errorf("failed to search for files in (%s), error: %s", searchDir, err)
}
scanner.FileList = fileList
@@ -265,7 +257,7 @@ func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
// Script - Update unversioned main android packages
stepList = append(stepList, steps.ScriptSteplistItem(updateAndroidExtraPackagesScriptTite, envmanModels.EnvironmentItemModel{
@@ -283,7 +275,7 @@ func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
// DeployToBitriseIo
stepList = append(stepList, steps.DeployToBitriseIoStepListItem())
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps(stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow([]envmanModels.EnvironmentItemModel{}, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return models.BitriseConfigMap{}, err
@@ -308,7 +300,7 @@ func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
// Script - Update unversioned main android packages
stepList = append(stepList, steps.ScriptSteplistItem(updateAndroidExtraPackagesScriptTite, envmanModels.EnvironmentItemModel{
@@ -326,7 +318,7 @@ func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
// DeployToBitriseIo
stepList = append(stepList, steps.DeployToBitriseIoStepListItem())
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps(stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow([]envmanModels.EnvironmentItemModel{}, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return models.BitriseConfigMap{}, err
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanners/fastlane/fastlane.go b/go/src/github.com/bitrise-core/bitrise-init/scanners/fastlane/fastlane.go
index 34da7582..d48e93ee 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/scanners/fastlane/fastlane.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanners/fastlane/fastlane.go
@@ -106,7 +106,6 @@ func defaultConfigName() string {
// Scanner ...
type Scanner struct {
- SearchDir string
Fastfiles []string
}
@@ -115,16 +114,11 @@ func (scanner Scanner) Name() string {
return scannerName
}
-// Configure ...
-func (scanner *Scanner) Configure(searchDir string) {
- scanner.SearchDir = searchDir
-}
-
// DetectPlatform ...
-func (scanner *Scanner) DetectPlatform() (bool, error) {
- fileList, err := utility.FileList(scanner.SearchDir)
+func (scanner *Scanner) DetectPlatform(searchDir string) (bool, error) {
+ fileList, err := utility.FileList(searchDir)
if err != nil {
- return false, fmt.Errorf("failed to search for files in (%s), error: %s", scanner.SearchDir, err)
+ return false, fmt.Errorf("failed to search for files in (%s), error: %s", searchDir, err)
}
// Search for Fastfile
@@ -229,7 +223,7 @@ func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
// CertificateAndProfileInstaller
stepList = append(stepList, steps.CertificateAndProfileInstallerStepListItem())
@@ -249,7 +243,7 @@ func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
envmanModels.EnvironmentItemModel{fastlaneXcodeListTimeoutEnvKey: fastlaneXcodeListTimeoutEnvValue},
}
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndAppEnvsAndPrimaryWorkflowSteps(appEnvs, stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow(appEnvs, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return models.BitriseConfigMap{}, err
@@ -273,7 +267,7 @@ func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
// CertificateAndProfileInstaller
stepList = append(stepList, steps.CertificateAndProfileInstallerStepListItem())
@@ -293,7 +287,7 @@ func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
envmanModels.EnvironmentItemModel{fastlaneXcodeListTimeoutEnvKey: fastlaneXcodeListTimeoutEnvValue},
}
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndAppEnvsAndPrimaryWorkflowSteps(appEnvs, stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow(appEnvs, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return models.BitriseConfigMap{}, err
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios.go b/go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios.go
index 6da7326b..cb0c5541 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios.go
@@ -3,11 +3,7 @@ package ios
import (
"errors"
"fmt"
- "os"
"path/filepath"
- "regexp"
- "sort"
- "strings"
"gopkg.in/yaml.v2"
@@ -16,24 +12,16 @@ import (
"github.com/bitrise-core/bitrise-init/utility"
bitriseModels "github.com/bitrise-io/bitrise/models"
envmanModels "github.com/bitrise-io/envman/models"
- "github.com/bitrise-io/go-utils/fileutil"
- "github.com/bitrise-io/xcode-utils/xcodeproj"
+ "github.com/bitrise-tools/go-xcode/xcodeproj"
)
var (
log = utility.NewLogger()
)
-const (
- scannerName = "ios"
-)
+const scannerName = "ios"
-const (
- xcodeprojExtension = ".xcodeproj"
- xcworkspaceExtension = ".xcworkspace"
- podFileBasePath = "Podfile"
- schemeFileExtension = ".xcscheme"
-)
+const defaultConfigName = "default-ios-config"
const (
projectPathKey = "project_path"
@@ -45,170 +33,6 @@ const (
schemeEnvKey = "BITRISE_SCHEME"
)
-var (
- embeddedWorkspacePathRegexp = regexp.MustCompile(`.+\.xcodeproj/.+\.xcworkspace`)
- scanProjectPathRegexpBlackList = []*regexp.Regexp{embeddedWorkspacePathRegexp}
-
- gitFolderName = ".git"
- podsFolderName = "Pods"
- carthageFolderName = "Carthage"
- scanFolderNameBlackList = []string{gitFolderName, podsFolderName, carthageFolderName}
-
- frameworkExt = ".framework"
- scanFolderExtBlackList = []string{frameworkExt}
-)
-
-//--------------------------------------------------
-// Utility
-//--------------------------------------------------
-
-func isPathMatchRegexp(pth string, regexp *regexp.Regexp) bool {
- return (regexp.FindString(pth) != "")
-}
-
-func isPathContainsComponent(pth, component string) bool {
- pathComponents := strings.Split(pth, string(filepath.Separator))
- for _, c := range pathComponents {
- if c == component {
- return true
- }
- }
- return false
-}
-
-func isPathContainsComponentWithExtension(pth, ext string) bool {
- pathComponents := strings.Split(pth, string(filepath.Separator))
- for _, c := range pathComponents {
- e := filepath.Ext(c)
- if e == ext {
- return true
- }
- }
- return false
-}
-
-func isDir(pth string) (bool, error) {
- fileInf, err := os.Lstat(pth)
- if err != nil {
- return false, err
- }
- if fileInf == nil {
- return false, errors.New("no file info available")
- }
- return fileInf.IsDir(), nil
-}
-
-func isRelevantProject(pth string, isTest bool) (bool, error) {
- // xcodeproj & xcworkspace should be a dir
- if !isTest {
- if is, err := isDir(pth); err != nil {
- return false, err
- } else if !is {
- return false, nil
- }
- }
-
- for _, regexp := range scanProjectPathRegexpBlackList {
- if isPathMatchRegexp(pth, regexp) {
- return false, nil
- }
- }
-
- for _, folderName := range scanFolderNameBlackList {
- if isPathContainsComponent(pth, folderName) {
- return false, nil
- }
- }
-
- for _, folderExt := range scanFolderExtBlackList {
- if isPathContainsComponentWithExtension(pth, folderExt) {
- return false, nil
- }
- }
-
- return true, nil
-}
-
-func filterXcodeprojectFiles(fileList []string, isTest bool) ([]string, error) {
- filteredFiles := utility.FilterFilesWithExtensions(fileList, xcodeprojExtension, xcworkspaceExtension)
- relevantFiles := []string{}
-
- for _, file := range filteredFiles {
- if is, err := isRelevantProject(file, isTest); err != nil {
- return []string{}, err
- } else if !is {
- continue
- }
-
- relevantFiles = append(relevantFiles, file)
- }
-
- sort.Sort(utility.ByComponents(relevantFiles))
-
- return relevantFiles, nil
-}
-
-func isRelevantPodfile(pth string) bool {
- basename := filepath.Base(pth)
- if !utility.CaseInsensitiveEquals(basename, "podfile") {
- return false
- }
-
- for _, folderName := range scanFolderNameBlackList {
- if isPathContainsComponent(pth, folderName) {
- return false
- }
- }
-
- for _, folderExt := range scanFolderExtBlackList {
- if isPathContainsComponentWithExtension(pth, folderExt) {
- return false
- }
- }
-
- return true
-}
-
-func filterPodFiles(fileList []string) []string {
- podfiles := []string{}
-
- for _, file := range fileList {
- if isRelevantPodfile(file) {
- podfiles = append(podfiles, file)
- }
- }
-
- if len(podfiles) == 0 {
- return []string{}
- }
-
- sort.Sort(utility.ByComponents(podfiles))
-
- return podfiles
-}
-
-func configName(hasPodfile, hasTest, missingSharedSchemes bool) string {
- name := "ios-"
- if hasPodfile {
- name = name + "pod-"
- }
- if hasTest {
- name = name + "test-"
- }
- if missingSharedSchemes {
- name = name + "missing-shared-schemes-"
- }
- return name + "config"
-}
-
-func defaultConfigName() string {
- return "default-ios-config"
-}
-
-//--------------------------------------------------
-// Scanner
-//--------------------------------------------------
-
// ConfigDescriptor ...
type ConfigDescriptor struct {
HasPodfile bool
@@ -232,9 +56,9 @@ func (descriptor ConfigDescriptor) String() string {
// Scanner ...
type Scanner struct {
- SearchDir string
- FileList []string
- XcodeProjectAndWorkspaceFiles []string
+ fileList []string
+
+ xcodeProjectAndWorkspaceFiles []string
configDescriptors []ConfigDescriptor
}
@@ -244,323 +68,296 @@ func (scanner Scanner) Name() string {
return scannerName
}
-// Configure ...
-func (scanner *Scanner) Configure(searchDir string) {
- scanner.SearchDir = searchDir
-}
-
// DetectPlatform ...
-func (scanner *Scanner) DetectPlatform() (bool, error) {
- fileList, err := utility.FileList(scanner.SearchDir)
+func (scanner *Scanner) DetectPlatform(searchDir string) (bool, error) {
+ fileList, err := utility.FileList(searchDir)
if err != nil {
- return false, fmt.Errorf("failed to search for files in (%s), error: %s", scanner.SearchDir, err)
+ return false, fmt.Errorf("failed to search for files in (%s), error: %s", searchDir, err)
}
- scanner.FileList = fileList
+ scanner.fileList = fileList
- // Search for xcodeproj file
- log.Info("Searching for .xcodeproj & .xcworkspace files")
+ // Search for xcodeproj and xcworkspace files
+ log.Info("Searching for iOS .xcodeproj & .xcworkspace files")
- xcodeProjectFiles, err := filterXcodeprojectFiles(fileList, false)
+ relevantXcodeProjectFiles, err := utility.FilterRelevantXcodeProjectFiles(fileList, false)
if err != nil {
return false, fmt.Errorf("failed to collect .xcodeproj & .xcworkspace files, error: %s", err)
}
- scanner.XcodeProjectAndWorkspaceFiles = xcodeProjectFiles
-
- log.Details("%d project file(s) detected", len(xcodeProjectFiles))
- for _, file := range xcodeProjectFiles {
- log.Details("- %s", file)
- }
- if len(xcodeProjectFiles) == 0 {
+ if len(relevantXcodeProjectFiles) == 0 {
log.Details("platform not detected")
-
return false, nil
}
- log.Done("Platform detected")
+ // Separate xcodeproj and xcworkspace files
+ projects := []string{}
+ workspaces := []string{}
- return true, nil
-}
-
-// Options ...
-func (scanner *Scanner) Options() (models.OptionModel, models.Warnings, error) {
- //
- // Create Pod workspace - project mapping
- log.Info("Searching for Podfiles")
- warnings := models.Warnings{}
-
- podFiles := filterPodFiles(scanner.FileList)
-
- log.Details("%d Podfile(s) detected", len(podFiles))
- for _, file := range podFiles {
- log.Details("- %s", file)
+ for _, projectOrWorkspace := range relevantXcodeProjectFiles {
+ if xcodeproj.IsXCodeProj(projectOrWorkspace) {
+ projects = append(projects, projectOrWorkspace)
+ } else {
+ workspaces = append(workspaces, projectOrWorkspace)
+ }
}
- validPodfileFound := false
+ // Filter xcodeproj and xcworkspace files with iphoneos sdk
+ iphoneosXcodeProjectFileMap := map[string]bool{}
- podfileWorkspaceProjectMap := map[string]string{}
- for _, podFile := range podFiles {
- log.Info("Inspecting Podfile: %s", podFile)
-
- var err error
- podfileWorkspaceProjectMap, err = utility.GetRelativeWorkspaceProjectPathMap(podFile, scanner.SearchDir)
+ for _, project := range projects {
+ pbxprojPth := filepath.Join(project, "project.pbxproj")
+ sdks, err := xcodeproj.GetBuildConfigSDKs(pbxprojPth)
if err != nil {
- log.Warn("Analyze Podfile (%s) failed, error: %s", podFile, err)
-
- if podfileContent, err := fileutil.ReadStringFromFile(podFile); err != nil {
- log.Warn("Failed to read Podfile (%s)", podFile)
- } else {
- fmt.Println(podfileContent)
- fmt.Println("")
+ return false, err
+ }
+ for _, sdk := range sdks {
+ if sdk == "iphoneos" {
+ iphoneosXcodeProjectFileMap[project] = true
}
+ }
+ }
- warnings = append(warnings, fmt.Sprintf("Failed to analyze Podfile: (%s), error: %s", podFile, err))
- continue
+ for _, workspace := range workspaces {
+ referredProjects, err := xcodeproj.WorkspaceProjectReferences(workspace)
+ if err != nil {
+ return false, err
}
- log.Details("workspace mapping:")
- for workspace, linkedProject := range podfileWorkspaceProjectMap {
- log.Details("- %s -> %s", workspace, linkedProject)
+ // Only deal with relevant projects
+ filteredProjects := []string{}
+ for _, project := range projects {
+ for _, projectToCheck := range projects {
+ if project == projectToCheck {
+ filteredProjects = append(filteredProjects, project)
+ }
+ }
}
+ referredProjects = filteredProjects
+ // ---
- validPodfileFound = true
+ for _, project := range referredProjects {
+ pbxprojPth := filepath.Join(project, "project.pbxproj")
+ sdks, err := xcodeproj.GetBuildConfigSDKs(pbxprojPth)
+ if err != nil {
+ return false, err
+ }
+ for _, sdk := range sdks {
+ if sdk == "iphoneos" {
+ iphoneosXcodeProjectFileMap[project] = true
+ }
+ }
+ }
}
- if len(podFiles) > 0 && !validPodfileFound {
- log.Error("%d Podfiles detected, but scanner was not able to analyze any of them", len(podFiles))
- return models.OptionModel{}, warnings, fmt.Errorf("%d Podfiles detected, but scanner was not able to analyze any of them", len(podFiles))
+ if len(iphoneosXcodeProjectFileMap) == 0 {
+ log.Details("platform not detected")
+ return false, nil
}
- // -----
- //
- // Separate projects and workspaces
- log.Info("Separate projects and workspaces")
- projects := []ProjectModel{}
- workspaces := []WorkspaceModel{}
-
- for _, workspaceOrProjectPth := range scanner.XcodeProjectAndWorkspaceFiles {
- if xcodeproj.IsXCodeProj(workspaceOrProjectPth) {
- project := ProjectModel{Pth: workspaceOrProjectPth}
- projects = append(projects, project)
- } else {
- workspace := WorkspaceModel{Pth: workspaceOrProjectPth}
- workspaces = append(workspaces, workspace)
- }
+ log.Details("")
+ log.Done("Platform detected")
+
+ iphoneosXcodeProjectFiles := []string{}
+ for iphoneosXcodeProjectFile := range iphoneosXcodeProjectFileMap {
+ iphoneosXcodeProjectFiles = append(iphoneosXcodeProjectFiles, iphoneosXcodeProjectFile)
}
- // -----
- //
- // Separate standalone projects, standalone workspaces and pod projects
- standaloneProjects := []ProjectModel{}
- standaloneWorkspaces := []WorkspaceModel{}
- podProjects := []ProjectModel{}
+ scanner.xcodeProjectAndWorkspaceFiles = iphoneosXcodeProjectFiles
- for _, project := range projects {
- if !utility.MapStringStringHasValue(podfileWorkspaceProjectMap, project.Pth) {
- standaloneProjects = append(standaloneProjects, project)
+ return true, nil
+}
+
+// Options ...
+func (scanner *Scanner) Options() (models.OptionModel, models.Warnings, error) {
+ warnings := models.Warnings{}
+
+ // Separate workspaces and standalone projects
+ workspaces := []xcodeproj.WorkspaceModel{}
+
+ projectsToCheck := []string{}
+ for _, projectOrWorkspace := range scanner.xcodeProjectAndWorkspaceFiles {
+ if xcodeproj.IsXCodeProj(projectOrWorkspace) {
+ projectsToCheck = append(projectsToCheck, projectOrWorkspace)
}
}
- log.Details("%d Standalone project(s) detected", len(standaloneProjects))
- for _, project := range standaloneProjects {
- log.Details("- %s", project.Pth)
- }
+ for _, projectOrWorkspace := range scanner.xcodeProjectAndWorkspaceFiles {
+ if xcodeproj.IsXCWorkspace(projectOrWorkspace) {
+ workspace, err := xcodeproj.NewWorkspace(projectOrWorkspace, projectsToCheck...)
+ if err != nil {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("failed to analyze workspace (%s), error: %s", projectOrWorkspace, err)
+ }
- for _, workspace := range workspaces {
- if _, found := podfileWorkspaceProjectMap[workspace.Pth]; !found {
- standaloneWorkspaces = append(standaloneWorkspaces, workspace)
+ workspaces = append(workspaces, workspace)
}
}
- log.Details("%d Standalone workspace(s) detected", len(standaloneWorkspaces))
- for _, workspace := range standaloneWorkspaces {
- log.Details("- %s", workspace.Pth)
+ if len(workspaces) > 0 {
+ log.Details("%d workspace file(s) detected", len(workspaces))
+ for _, workspace := range workspaces {
+ projects := []string{}
+ for _, project := range workspace.Projects {
+ projects = append(projects, project.Name)
+ }
+ log.Details("- %s (projects: %v)", workspace.Name, projects)
+ }
}
- for podWorkspacePth, linkedProjectPth := range podfileWorkspaceProjectMap {
- project, found := FindProjectWithPth(projects, linkedProjectPth)
- if !found {
- log.Warn("workspace mapping contains project (%s), but not found in project list", linkedProjectPth)
- warnings = append(warnings, "Workspace (%s) should generated by project (%s), but project not found in the project list", podWorkspacePth, linkedProjectPth)
+ projects := []xcodeproj.ProjectModel{}
+
+ for _, projectOrWorkspace := range scanner.xcodeProjectAndWorkspaceFiles {
+ if !xcodeproj.IsXCodeProj(projectOrWorkspace) {
continue
}
- workspace, found := FindWorkspaceWithPth(workspaces, podWorkspacePth)
- if !found {
- workspace = WorkspaceModel{Pth: podWorkspacePth}
+ contained := false
+
+ for _, workspace := range workspaces {
+ for _, project := range workspace.Projects {
+ if project.Pth == projectOrWorkspace {
+ contained = true
+ }
+ }
}
- workspace.GeneratedByPod = true
+ if !contained {
+ project, err := xcodeproj.NewProject(projectOrWorkspace)
+ if err != nil {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("failed to analyze project (%s), error: %s", projectOrWorkspace, err)
+ }
- project.PodWorkspace = workspace
- podProjects = append(podProjects, project)
+ projects = append(projects, project)
+ }
}
- log.Details("%d Pod project(s) detected", len(podProjects))
- for _, project := range podProjects {
- log.Details("- %s -> %s", project.Pth, project.PodWorkspace.Pth)
+ if len(projects) > 0 {
+ log.Details("%d project file(s) detected", len(projects))
+ for _, project := range projects {
+ log.Details("- %s", project.Name)
+ }
}
- // -----
+ // ---
- //
- // Analyze projects and workspaces
- analyzedProjects := []ProjectModel{}
- analyzedWorkspaces := []WorkspaceModel{}
+ // Create cocoapods project-workspace mapping
+ log.Info("Searching for Podfiles")
- for _, project := range standaloneProjects {
- log.Info("Inspecting standalone project file: %s", project.Pth)
+ podFiles := utility.FilterRelevantPodFiles(scanner.fileList)
- schemes := []SchemeModel{}
+ log.Details("%d Podfile(s) detected", len(podFiles))
+ for _, file := range podFiles {
+ log.Details("- %s", file)
+ }
- schemeXCtestMap, err := xcodeproj.ProjectSharedSchemes(project.Pth)
+ for _, podfile := range podFiles {
+ workspaceProjectMap, err := utility.GetWorkspaceProjectMap(podfile)
if err != nil {
- log.Warn("Failed to get shared schemes, error: %s", err)
- warnings = append(warnings, fmt.Sprintf("Failed to get shared schemes for project (%s), error: %s", project.Pth, err))
+ log.Warn("Analyze Podfile (%s) failed, error: %s", podfile, err)
+ warnings = append(warnings, fmt.Sprintf("Failed to analyze Podfile: (%s), error: %s", podfile, err))
continue
}
- log.Details("%d shared scheme(s) detected", len(schemeXCtestMap))
- for scheme, hasXCTest := range schemeXCtestMap {
- log.Details("- %s", scheme)
+ log.Details("")
+ log.Details("cocoapods workspace-project mapping:")
+ for workspacePth, linkedProjectPth := range workspaceProjectMap {
+ log.Details("- %s -> %s", workspacePth, linkedProjectPth)
- schemes = append(schemes, SchemeModel{Name: scheme, HasXCTest: hasXCTest, Shared: true})
- }
+ podWorkspace := xcodeproj.WorkspaceModel{}
- if len(schemeXCtestMap) == 0 {
- log.Details("")
- log.Error("No shared schemes found, adding recreate-user-schemes step...")
- log.Error("The newly generated schemes may differ from the ones in your project.")
- log.Error("Make sure to share your schemes, to have the expected behaviour.")
- log.Details("")
+ projectFound := false
- message := `No shared schemes found for project: ` + project.Pth + `.
-Automatically generated schemes for this project.
-These schemes may differ from the ones in your project.
-Make sure to share your schemes for the expected behaviour.`
+ for _, workspace := range workspaces {
+ if workspace.Pth == workspacePth {
+ podWorkspace = workspace
- warnings = append(warnings, fmt.Sprintf(message))
+ for _, project := range workspace.Projects {
+ if project.Pth == linkedProjectPth {
+ projectFound = true
+ }
+ }
- targetXCTestMap, err := xcodeproj.ProjectTargets(project.Pth)
- if err != nil {
- log.Warn("Failed to get targets, error: %s", err)
- warnings = append(warnings, fmt.Sprintf("Failed to get targets for project (%s), error: %s", project.Pth, err))
- continue
+ if !projectFound {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("workspace (%s) is exists, but does not conatins project (%s)", workspace.Name, linkedProjectPth)
+ }
+ }
+ }
+ podWorkspace.IsPodWorkspace = true
+
+ if !projectFound {
+ for _, project := range projects {
+ if project.Pth == linkedProjectPth {
+ projectFound = true
+ podWorkspace.Projects = append(podWorkspace.Projects, project)
+ }
+ }
}
- log.Warn("%d user scheme(s) will be generated", len(targetXCTestMap))
- for target, hasXCTest := range targetXCTestMap {
- log.Warn("- %s", target)
-
- schemes = append(schemes, SchemeModel{Name: target, HasXCTest: hasXCTest, Shared: false})
+ if !projectFound {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("project (%s) not found", linkedProjectPth)
}
}
-
- project.Schemes = schemes
- analyzedProjects = append(analyzedProjects, project)
}
+ // ---
- for _, workspace := range standaloneWorkspaces {
- log.Info("Inspecting standalone workspace file: %s", workspace.Pth)
-
- schemes := []SchemeModel{}
-
- schemeXCtestMap, err := xcodeproj.WorkspaceSharedSchemes(workspace.Pth)
- if err != nil {
- log.Warn("Failed to get shared schemes, error: %s", err)
- warnings = append(warnings, fmt.Sprintf("Failed to get shared schemes for project (%s), error: %s", workspace.Pth, err))
- continue
- }
-
- log.Details("%d shared scheme(s) detected", len(schemeXCtestMap))
- for scheme, hasXCTest := range schemeXCtestMap {
- log.Details("- %s", scheme)
+ //
+ // Analyze projects and workspaces
+ for _, project := range projects {
+ log.Info("Inspecting standalone project file: %s", project.Pth)
- schemes = append(schemes, SchemeModel{Name: scheme, HasXCTest: hasXCTest, Shared: true})
+ log.Details("%d shared scheme(s) detected", len(project.SharedSchemes))
+ for _, scheme := range project.SharedSchemes {
+ log.Details("- %s", scheme.Name)
}
- if len(schemeXCtestMap) == 0 {
+ if len(project.SharedSchemes) == 0 {
log.Details("")
log.Error("No shared schemes found, adding recreate-user-schemes step...")
- log.Error("The newly generated schemes, may differs from the ones in your project.")
+ log.Error("The newly generated schemes may differ from the ones in your project.")
log.Error("Make sure to share your schemes, to have the expected behaviour.")
log.Details("")
- message := `No shared schemes found for project: ` + workspace.Pth + `.
-Automatically generated schemes for this project.
-These schemes may differ from the ones in your project.
-Make sure to share your schemes for the expected behaviour.`
+ message := `No shared schemes found for project: ` + project.Pth + `.
+ Automatically generated schemes for this project.
+ These schemes may differ from the ones in your project.
+ Make sure to share your schemes for the expected behaviour.`
warnings = append(warnings, fmt.Sprintf(message))
- targetXCTestMap, err := xcodeproj.WorkspaceTargets(workspace.Pth)
- if err != nil {
- log.Warn("Failed to get targets, error: %s", err)
- warnings = append(warnings, fmt.Sprintf("Failed to get targets for project (%s), error: %s", workspace.Pth, err))
- continue
- }
-
- log.Warn("%d user scheme(s) will be generated", len(targetXCTestMap))
- for target, hasXCTest := range targetXCTestMap {
- log.Warn("- %s", target)
-
- schemes = append(schemes, SchemeModel{Name: target, HasXCTest: hasXCTest, Shared: false})
+ log.Warn("%d user scheme(s) will be generated", len(project.Targets))
+ for _, target := range project.Targets {
+ log.Warn("- %s", target.Name)
}
}
-
- workspace.Schemes = schemes
- analyzedWorkspaces = append(analyzedWorkspaces, workspace)
}
- for _, project := range podProjects {
- log.Info("Inspecting pod project file: %s", project.Pth)
-
- schemes := []SchemeModel{}
-
- schemeXCtestMap, err := xcodeproj.ProjectSharedSchemes(project.Pth)
- if err != nil {
- log.Warn("Failed to get shared schemes, error: %s", err)
- warnings = append(warnings, fmt.Sprintf("Failed to get shared schemes for project (%s), error: %s", project.Pth, err))
- continue
- }
-
- log.Details("%d shared scheme(s) detected", len(schemeXCtestMap))
- for scheme, hasXCTest := range schemeXCtestMap {
- log.Details("- %s", scheme)
+ for _, workspace := range workspaces {
+ log.Info("Inspecting workspace file: %s", workspace.Pth)
- schemes = append(schemes, SchemeModel{Name: scheme, HasXCTest: hasXCTest, Shared: true})
+ sharedSchemes := workspace.GetSharedSchemes()
+ log.Details("%d shared scheme(s) detected", len(sharedSchemes))
+ for _, scheme := range sharedSchemes {
+ log.Details("- %s", scheme.Name)
}
- if len(schemeXCtestMap) == 0 {
+ if len(sharedSchemes) == 0 {
log.Details("")
log.Error("No shared schemes found, adding recreate-user-schemes step...")
log.Error("The newly generated schemes, may differs from the ones in your project.")
log.Error("Make sure to share your schemes, to have the expected behaviour.")
log.Details("")
- message := `No shared schemes found for project: ` + project.Pth + `.
-Automatically generated schemes for this project.
-These schemes may differ from the ones in your project.
-Make sure to share your schemes for the expected behaviour.`
+ message := `No shared schemes found for project: ` + workspace.Pth + `.
+ Automatically generated schemes for this project.
+ These schemes may differ from the ones in your project.
+ Make sure to share your schemes for the expected behaviour.`
warnings = append(warnings, fmt.Sprintf(message))
- targetXCTestMap, err := xcodeproj.ProjectTargets(project.Pth)
- if err != nil {
- log.Warn("Failed to get targets, error: %s", err)
- warnings = append(warnings, fmt.Sprintf("Failed to get targets for project (%s), error: %s", project.Pth, err))
- continue
- }
-
- log.Warn("%d user scheme(s) will be generated", len(targetXCTestMap))
- for target, hasXCTest := range targetXCTestMap {
- log.Warn("- %s", target)
-
- schemes = append(schemes, SchemeModel{Name: target, HasXCTest: hasXCTest, Shared: false})
+ targets := workspace.GetTargets()
+ log.Warn("%d user scheme(s) will be generated", len(targets))
+ for _, target := range targets {
+ log.Warn("- %s", target.Name)
}
}
-
- project.PodWorkspace.Schemes = schemes
- analyzedWorkspaces = append(analyzedWorkspaces, project.PodWorkspace)
}
// -----
@@ -569,41 +366,77 @@ Make sure to 0 {
+ log.Details("%d workspace file(s) detected", len(workspaces))
+ for _, workspace := range workspaces {
+ projects := []string{}
+ for _, project := range workspace.Projects {
+ projects = append(projects, project.Name)
+ }
+ log.Details("- %s (projects: %v)", workspace.Name, projects)
+ }
+ }
+
+ projects := []xcodeproj.ProjectModel{}
+
+ for _, projectOrWorkspace := range scanner.xcodeProjectAndWorkspaceFiles {
+ if !xcodeproj.IsXCodeProj(projectOrWorkspace) {
+ continue
+ }
+
+ contained := false
+
+ for _, workspace := range workspaces {
+ for _, project := range workspace.Projects {
+ if project.Pth == projectOrWorkspace {
+ contained = true
+ }
+ }
+ }
+
+ if !contained {
+ project, err := xcodeproj.NewProject(projectOrWorkspace)
+ if err != nil {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("failed to analyze project (%s), error: %s", projectOrWorkspace, err)
+ }
+
+ projects = append(projects, project)
+ }
+ }
+
+ if len(projects) > 0 {
+ log.Details("%d project file(s) detected", len(projects))
+ for _, project := range projects {
+ log.Details("- %s", project.Name)
+ }
+ }
+ // ---
+
+ // Create cocoapods project-workspace mapping
+ log.Info("Searching for Podfiles")
+
+ podFiles := utility.FilterRelevantPodFiles(scanner.fileList)
+
+ log.Details("%d Podfile(s) detected", len(podFiles))
+ for _, file := range podFiles {
+ log.Details("- %s", file)
+ }
+
+ for _, podfile := range podFiles {
+ workspaceProjectMap, err := utility.GetWorkspaceProjectMap(podfile)
+ if err != nil {
+ log.Warn("Analyze Podfile (%s) failed, error: %s", podfile, err)
+ warnings = append(warnings, fmt.Sprintf("Failed to analyze Podfile: (%s), error: %s", podfile, err))
+ continue
+ }
+
+ log.Details("")
+ log.Details("cocoapods workspace-project mapping:")
+ for workspacePth, linkedProjectPth := range workspaceProjectMap {
+ log.Details("- %s -> %s", workspacePth, linkedProjectPth)
+
+ podWorkspace := xcodeproj.WorkspaceModel{}
+
+ projectFound := false
+
+ for _, workspace := range workspaces {
+ if workspace.Pth == workspacePth {
+ podWorkspace = workspace
+
+ for _, project := range workspace.Projects {
+ if project.Pth == linkedProjectPth {
+ projectFound = true
+ }
+ }
+
+ if !projectFound {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("workspace (%s) exists, but does not contain project (%s)", workspace.Name, linkedProjectPth)
+ }
+ }
+ }
+ podWorkspace.IsPodWorkspace = true
+
+ if !projectFound {
+ for _, project := range projects {
+ if project.Pth == linkedProjectPth {
+ projectFound = true
+ podWorkspace.Projects = append(podWorkspace.Projects, project)
+ }
+ }
+ }
+
+ if !projectFound {
+ return models.OptionModel{}, models.Warnings{}, fmt.Errorf("project (%s) not found", linkedProjectPth)
+ }
+ }
+ }
+ // ---
+
+ //
+ // Analyze projects and workspaces
+ for _, project := range projects {
+ log.Info("Inspecting standalone project file: %s", project.Pth)
+
+ log.Details("%d shared scheme(s) detected", len(project.SharedSchemes))
+ for _, scheme := range project.SharedSchemes {
+ log.Details("- %s", scheme.Name)
+ }
+
+ if len(project.SharedSchemes) == 0 {
+ log.Details("")
+ log.Error("No shared schemes found, adding recreate-user-schemes step...")
+ log.Error("The newly generated schemes may differ from the ones in your project.")
+ log.Error("Make sure to share your schemes, to have the expected behaviour.")
+ log.Details("")
+
+ message := `No shared schemes found for project: ` + project.Pth + `.
+ Automatically generated schemes for this project.
+ These schemes may differ from the ones in your project.
+ Make sure to share your schemes for the expected behaviour.`
+
+ warnings = append(warnings, message)
+
+ log.Warn("%d user scheme(s) will be generated", len(project.Targets))
+ for _, target := range project.Targets {
+ log.Warn("- %s", target.Name)
+ }
+ }
+ }
+
+ for _, workspace := range workspaces {
+ log.Info("Inspecting workspace file: %s", workspace.Pth)
+
+ sharedSchemes := workspace.GetSharedSchemes()
+ log.Details("%d shared scheme(s) detected", len(sharedSchemes))
+ for _, scheme := range sharedSchemes {
+ log.Details("- %s", scheme.Name)
+ }
+
+ if len(sharedSchemes) == 0 {
+ log.Details("")
+ log.Error("No shared schemes found, adding recreate-user-schemes step...")
+ log.Error("The newly generated schemes may differ from the ones in your project.")
+ log.Error("Make sure to share your schemes, to have the expected behaviour.")
+ log.Details("")
+
+ message := `No shared schemes found for project: ` + workspace.Pth + `.
+ Automatically generated schemes for this project.
+ These schemes may differ from the ones in your project.
+ Make sure to share your schemes for the expected behaviour.`
+
+ warnings = append(warnings, message)
+
+ targets := workspace.GetTargets()
+ log.Warn("%d user scheme(s) will be generated", len(targets))
+ for _, target := range targets {
+ log.Warn("- %s", target.Name)
+ }
+ }
+ }
+ // -----
+
+ //
+ // Create config descriptors
+ configDescriptors := []ConfigDescriptor{}
+ projectPathOption := models.NewOptionModel(projectPathTitle, projectPathEnvKey)
+
+ for _, project := range projects {
+ schemeOption := models.NewOptionModel(schemeTitle, schemeEnvKey)
+
+ if len(project.SharedSchemes) == 0 {
+ for _, target := range project.Targets {
+ configDescriptor := ConfigDescriptor{
+ HasPodfile: false,
+ HasTest: target.HasXCTest,
+ MissingSharedSchemes: true,
+ }
+ configDescriptors = append(configDescriptors, configDescriptor)
+
+ configOption := models.NewEmptyOptionModel()
+ configOption.Config = configDescriptor.String()
+
+ schemeOption.ValueMap[target.Name] = configOption
+ }
+ } else {
+ for _, scheme := range project.SharedSchemes {
+ configDescriptor := ConfigDescriptor{
+ HasPodfile: false,
+ HasTest: scheme.HasXCTest,
+ MissingSharedSchemes: false,
+ }
+ configDescriptors = append(configDescriptors, configDescriptor)
+
+ configOption := models.NewEmptyOptionModel()
+ configOption.Config = configDescriptor.String()
+
+ schemeOption.ValueMap[scheme.Name] = configOption
+ }
+ }
+
+ projectPathOption.ValueMap[project.Pth] = schemeOption
+ }
+
+ for _, workspace := range workspaces {
+ schemeOption := models.NewOptionModel(schemeTitle, schemeEnvKey)
+
+ schemes := workspace.GetSharedSchemes()
+
+ if len(schemes) == 0 {
+ targets := workspace.GetTargets()
+
+ for _, target := range targets {
+ configDescriptor := ConfigDescriptor{
+ HasPodfile: workspace.IsPodWorkspace,
+ HasTest: target.HasXCTest,
+ MissingSharedSchemes: true,
+ }
+ configDescriptors = append(configDescriptors, configDescriptor)
+
+ configOption := models.NewEmptyOptionModel()
+ configOption.Config = configDescriptor.String()
+
+ schemeOption.ValueMap[target.Name] = configOption
+ }
+ } else {
+ for _, scheme := range schemes {
+ configDescriptor := ConfigDescriptor{
+ HasPodfile: workspace.IsPodWorkspace,
+ HasTest: scheme.HasXCTest,
+ MissingSharedSchemes: false,
+ }
+ configDescriptors = append(configDescriptors, configDescriptor)
+
+ configOption := models.NewEmptyOptionModel()
+ configOption.Config = configDescriptor.String()
+
+ schemeOption.ValueMap[scheme.Name] = configOption
+ }
+ }
+
+ projectPathOption.ValueMap[workspace.Pth] = schemeOption
+ }
+ // -----
+
+ if len(configDescriptors) == 0 {
+ log.Error("No valid iOS config found")
+ return models.OptionModel{}, warnings, errors.New("No valid config found")
+ }
+
+ scanner.configDescriptors = configDescriptors
+
+ return projectPathOption, warnings, nil
+}
+
+// DefaultOptions ...
+func (scanner *Scanner) DefaultOptions() models.OptionModel {
+ configOption := models.NewEmptyOptionModel()
+ configOption.Config = defaultConfigName
+
+ projectPathOption := models.NewOptionModel(projectPathTitle, projectPathEnvKey)
+ schemeOption := models.NewOptionModel(schemeTitle, schemeEnvKey)
+
+ schemeOption.ValueMap["_"] = configOption
+ projectPathOption.ValueMap["_"] = schemeOption
+
+ return projectPathOption
+}
+
+func generateConfig(hasPodfile, hasTest, missingSharedSchemes bool) bitriseModels.BitriseDataModel {
+ //
+ // Prepare steps
+ prepareSteps := []bitriseModels.StepListItemModel{}
+
+ // ActivateSSHKey
+ prepareSteps = append(prepareSteps, steps.ActivateSSHKeyStepListItem())
+
+ // GitClone
+ prepareSteps = append(prepareSteps, steps.GitCloneStepListItem())
+
+ // Script
+ prepareSteps = append(prepareSteps, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
+
+ // CertificateAndProfileInstaller
+ prepareSteps = append(prepareSteps, steps.CertificateAndProfileInstallerStepListItem())
+
+ if hasPodfile {
+ // CocoapodsInstall
+ prepareSteps = append(prepareSteps, steps.CocoapodsInstallStepListItem())
+ }
+
+ if missingSharedSchemes {
+ // RecreateUserSchemes
+ prepareSteps = append(prepareSteps, steps.RecreateUserSchemesStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ }))
+ }
+ // ----------
+
+ //
+ // CI steps
+ ciSteps := append([]bitriseModels.StepListItemModel{}, prepareSteps...)
+
+ if hasTest {
+ // XcodeTestMac
+ ciSteps = append(ciSteps, steps.XcodeTestMacStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ envmanModels.EnvironmentItemModel{schemeKey: "$" + schemeEnvKey},
+ }))
+ }
+
+ // DeployToBitriseIo
+ ciSteps = append(ciSteps, steps.DeployToBitriseIoStepListItem())
+ // ----------
+
+ //
+ // Deploy steps
+ deploySteps := append([]bitriseModels.StepListItemModel{}, prepareSteps...)
+
+ if hasTest {
+ // XcodeTestMac
+ deploySteps = append(deploySteps, steps.XcodeTestMacStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ envmanModels.EnvironmentItemModel{schemeKey: "$" + schemeEnvKey},
+ }))
+ }
+
+ // XcodeArchiveMac
+ deploySteps = append(deploySteps, steps.XcodeArchiveMacStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ envmanModels.EnvironmentItemModel{schemeKey: "$" + schemeEnvKey},
+ }))
+
+ // DeployToBitriseIo
+ deploySteps = append(deploySteps, steps.DeployToBitriseIoStepListItem())
+ // ----------
+
+ return models.BitriseDataWithCIAndCDWorkflow([]envmanModels.EnvironmentItemModel{}, ciSteps, deploySteps)
+}
+
+// Configs ...
+func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
+ descriptors := []ConfigDescriptor{}
+ descritorNameMap := map[string]bool{}
+
+ for _, descriptor := range scanner.configDescriptors {
+ if _, exist := descritorNameMap[descriptor.String()]; !exist {
+ descritorNameMap[descriptor.String()] = true
+ descriptors = append(descriptors, descriptor)
+ }
+ }
+
+ bitriseDataMap := models.BitriseConfigMap{}
+ for _, descriptor := range descriptors {
+ configName := descriptor.String()
+ bitriseData := generateConfig(descriptor.HasPodfile, descriptor.HasTest, descriptor.MissingSharedSchemes)
+ data, err := yaml.Marshal(bitriseData)
+ if err != nil {
+ return models.BitriseConfigMap{}, err
+ }
+ bitriseDataMap[configName] = string(data)
+ }
+
+ return bitriseDataMap, nil
+}
+
+// DefaultConfigs ...
+func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
+ //
+ // Prepare steps
+ prepareSteps := []bitriseModels.StepListItemModel{}
+
+ // ActivateSSHKey
+ prepareSteps = append(prepareSteps, steps.ActivateSSHKeyStepListItem())
+
+ // GitClone
+ prepareSteps = append(prepareSteps, steps.GitCloneStepListItem())
+
+ // Script
+ prepareSteps = append(prepareSteps, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
+
+ // CertificateAndProfileInstaller
+ prepareSteps = append(prepareSteps, steps.CertificateAndProfileInstallerStepListItem())
+
+ // CocoapodsInstall
+ prepareSteps = append(prepareSteps, steps.CocoapodsInstallStepListItem())
+
+ // RecreateUserSchemes
+ prepareSteps = append(prepareSteps, steps.RecreateUserSchemesStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ }))
+ // ----------
+
+ //
+ // CI steps
+ ciSteps := append([]bitriseModels.StepListItemModel{}, prepareSteps...)
+
+ // XcodeTestMac
+ ciSteps = append(ciSteps, steps.XcodeTestMacStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ envmanModels.EnvironmentItemModel{schemeKey: "$" + schemeEnvKey},
+ }))
+
+ // DeployToBitriseIo
+ ciSteps = append(ciSteps, steps.DeployToBitriseIoStepListItem())
+ // ----------
+
+ //
+ // Deploy steps
+ deploySteps := append([]bitriseModels.StepListItemModel{}, prepareSteps...)
+
+ // XcodeTestMac
+ deploySteps = append(deploySteps, steps.XcodeTestMacStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ envmanModels.EnvironmentItemModel{schemeKey: "$" + schemeEnvKey},
+ }))
+
+ // XcodeArchiveMac
+ deploySteps = append(deploySteps, steps.XcodeArchiveMacStepListItem([]envmanModels.EnvironmentItemModel{
+ envmanModels.EnvironmentItemModel{projectPathKey: "$" + projectPathEnvKey},
+ envmanModels.EnvironmentItemModel{schemeKey: "$" + schemeEnvKey},
+ }))
+
+ // DeployToBitriseIo
+ deploySteps = append(deploySteps, steps.DeployToBitriseIoStepListItem())
+ // ----------
+
+ config := models.BitriseDataWithCIAndCDWorkflow([]envmanModels.EnvironmentItemModel{}, ciSteps, deploySteps)
+ data, err := yaml.Marshal(config)
+ if err != nil {
+ return models.BitriseConfigMap{}, err
+ }
+
+ configName := defaultConfigName
+ bitriseDataMap := models.BitriseConfigMap{}
+ bitriseDataMap[configName] = string(data)
+
+ return bitriseDataMap, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanners/scanners.go b/go/src/github.com/bitrise-core/bitrise-init/scanners/scanners.go
index 3fd75146..f3831e9d 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/scanners/scanners.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanners/scanners.go
@@ -2,25 +2,68 @@ package scanners
import (
"github.com/bitrise-core/bitrise-init/models"
+ "github.com/bitrise-core/bitrise-init/scanners/android"
+ "github.com/bitrise-core/bitrise-init/scanners/fastlane"
+ "github.com/bitrise-core/bitrise-init/scanners/ios"
+ "github.com/bitrise-core/bitrise-init/scanners/xamarin"
"github.com/bitrise-core/bitrise-init/steps"
bitriseModels "github.com/bitrise-io/bitrise/models"
+ envmanModels "github.com/bitrise-io/envman/models"
"gopkg.in/yaml.v2"
)
// ScannerInterface ...
type ScannerInterface interface {
+ // The name of the scanner is used for logging and
+ // to store the scanner outputs, like warnings, options and configs.
+ // The outputs are stored in a map[NAME]OUTPUT, like: warningMap[ios]warnings, optionsMap[android]options, configMap[xamarin]configs, ...,
+ // this means, that the SCANNER NAME HAS TO BE UNIQUE.
+ // Returns:
+ // - the name of the scanner
Name() string
- Configure(searchDir string)
- DetectPlatform() (bool, error)
+ // Should implement as minimal logic as possible to determine if searchDir contains the - in question - platform or not.
+ // Inputs:
+ //  - searchDir: the directory where the project to scan exists.
+ // Returns:
+ //  - platform detected
+ //  - error (if any)
+ DetectPlatform(searchDir string) (bool, error)
+ // OptionModel is the model, used to store the available configuration combinations.
+ // It defines option branches which lead to different bitrise configurations.
+ // Each branch should define a complete and valid set of options to build the final bitrise config model.
+ // Every OptionModel branch's last option has to be the key of the workflow (in the BitriseConfigMap), which will be fulfilled with the selected options.
+ // Returns:
+ // - an OptionModel
+ // - Warnings (if any)
+ // - error (if any)
Options() (models.OptionModel, models.Warnings, error)
+
+ // Returns:
+ // - default options for the platform.
DefaultOptions() models.OptionModel
+ // Each element of BitriseConfigMap is a bitrise config template which will be fulfilled with the user selected options.
+ // Every config's key should be the last option of one of the OptionModel branches.
+ // Returns:
+ // - platform BitriseConfigMap
Configs() (models.BitriseConfigMap, error)
+
+ // Returns:
+ // - platform default BitriseConfigMap
DefaultConfigs() (models.BitriseConfigMap, error)
}
+// ActiveScanners ...
+var ActiveScanners = []ScannerInterface{
+ new(ios.Scanner),
+ // new(macos.Scanner),
+ new(android.Scanner),
+ new(xamarin.Scanner),
+ new(fastlane.Scanner),
+}
+
func customConfigName() string {
return "custom-config"
}
@@ -37,9 +80,9 @@ func CustomConfig() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps(stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow([]envmanModels.EnvironmentItemModel{}, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return map[string]string{}, err
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanners/xamarin/xamarin.go b/go/src/github.com/bitrise-core/bitrise-init/scanners/xamarin/xamarin.go
index 5c397d43..c3e68e89 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/scanners/xamarin/xamarin.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/scanners/xamarin/xamarin.go
@@ -309,16 +309,11 @@ func (scanner Scanner) Name() string {
return scannerName
}
-// Configure ...
-func (scanner *Scanner) Configure(searchDir string) {
- scanner.SearchDir = searchDir
-}
-
// DetectPlatform ...
-func (scanner *Scanner) DetectPlatform() (bool, error) {
- fileList, err := utility.FileList(scanner.SearchDir)
+func (scanner *Scanner) DetectPlatform(searchDir string) (bool, error) {
+ fileList, err := utility.FileList(searchDir)
if err != nil {
- return false, fmt.Errorf("failed to search for files in (%s), error: %s", scanner.SearchDir, err)
+ return false, fmt.Errorf("failed to search for files in (%s), error: %s", searchDir, err)
}
scanner.FileList = fileList
@@ -467,7 +462,7 @@ func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
// CertificateAndProfileInstaller
stepList = append(stepList, steps.CertificateAndProfileInstallerStepListItem())
@@ -508,7 +503,7 @@ func (scanner *Scanner) Configs() (models.BitriseConfigMap, error) {
// DeployToBitriseIo
stepList = append(stepList, steps.DeployToBitriseIoStepListItem())
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps(stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow([]envmanModels.EnvironmentItemModel{}, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return models.BitriseConfigMap{}, err
@@ -533,7 +528,7 @@ func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
stepList = append(stepList, steps.GitCloneStepListItem())
// Script
- stepList = append(stepList, steps.ScriptSteplistItem(steps.TemplateScriptStepTitiel))
+ stepList = append(stepList, steps.ScriptSteplistItem(steps.ScriptDefaultTitle))
// CertificateAndProfileInstaller
stepList = append(stepList, steps.CertificateAndProfileInstallerStepListItem())
@@ -560,7 +555,7 @@ func (scanner *Scanner) DefaultConfigs() (models.BitriseConfigMap, error) {
// DeployToBitriseIo
stepList = append(stepList, steps.DeployToBitriseIoStepListItem())
- bitriseData := models.BitriseDataWithDefaultTriggerMapAndPrimaryWorkflowSteps(stepList)
+ bitriseData := models.BitriseDataWithCIWorkflow([]envmanModels.EnvironmentItemModel{}, stepList)
data, err := yaml.Marshal(bitriseData)
if err != nil {
return models.BitriseConfigMap{}, err
diff --git a/go/src/github.com/bitrise-core/bitrise-init/steps/steps.go b/go/src/github.com/bitrise-core/bitrise-init/steps/steps.go
index 2c3137b7..6f53c0b0 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/steps/steps.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/steps/steps.go
@@ -8,60 +8,106 @@ import (
)
const (
- // Common Step IDs
- activateSSHKeyID = "activate-ssh-key"
- activateSSHKeyVersion = "3.1.1"
-
- gitCloneID = "git-clone"
- gitCloneVersion = "3.4.1"
-
- certificateAndProfileInstallerID = "certificate-and-profile-installer"
- certificateAndProfileInstallerVersion = "1.8.1"
-
- deployToBitriseIoID = "deploy-to-bitrise-io"
- deployToBitriseIoVersion = "1.2.5"
-
- scriptID = "script"
- scriptVersion = "1.1.3"
-
- // Android Step IDs
- gradleRunnerID = "gradle-runner"
- gradleRunnerVersion = "1.5.2"
-
- // Fastlane Step IDs
- fastlaneID = "fastlane"
- fastlaneVersion = "2.2.0"
-
- // iOS Step IDs
- cocoapodsInstallID = "cocoapods-install"
- cocoapodsInstallVersion = "1.5.7"
-
- recreateUserSchemesID = "recreate-user-schemes"
- recreateUserSchemesVersion = "0.9.4"
-
- xcodeArchiveID = "xcode-archive"
- xcodeArchiveVersion = "1.10.1"
-
- xcodeTestID = "xcode-test"
- xcodeTestVersion = "1.17.1"
-
- // Xamarin Step IDs
- xamarinUserManagementID = "xamarin-user-management"
- xamarinUserManagementVersion = "1.0.3"
-
- nugetRestoreID = "nuget-restore"
- nugetRestoreVersion = "1.0.1"
-
- xamarinComponentsRestoreID = "xamarin-components-restore"
- xamarinComponentsRestoreVersion = "0.9.0"
-
- xamarinArchiveID = "xamarin-archive"
- xamarinArchiveVersion = "1.1.1"
+ // Common Steps
+
+ // ActivateSSHKeyID ...
+ ActivateSSHKeyID = "activate-ssh-key"
+ // ActivateSSHKeyVersion ...
+ ActivateSSHKeyVersion = "3.1.1"
+
+ // GitCloneID ...
+ GitCloneID = "git-clone"
+ // GitCloneVersion ...
+ GitCloneVersion = "3.4.1"
+
+ // CertificateAndProfileInstallerID ...
+ CertificateAndProfileInstallerID = "certificate-and-profile-installer"
+ // CertificateAndProfileInstallerVersion ...
+ CertificateAndProfileInstallerVersion = "1.8.1"
+
+ // DeployToBitriseIoID ...
+ DeployToBitriseIoID = "deploy-to-bitrise-io"
+ // DeployToBitriseIoVersion ...
+ DeployToBitriseIoVersion = "1.2.5"
+
+ // ScriptID ...
+ ScriptID = "script"
+ // ScriptVersion ...
+ ScriptVersion = "1.1.3"
+ // ScriptDefaultTitle ...
+ ScriptDefaultTitle = "Do anything with Script step"
+
+ // Android Steps
+
+ // GradleRunnerID ...
+ GradleRunnerID = "gradle-runner"
+ // GradleRunnerVersion ...
+ GradleRunnerVersion = "1.5.2"
+
+ // Fastlane Steps
+
+ // FastlaneID ...
+ FastlaneID = "fastlane"
+ // FastlaneVersion ...
+ FastlaneVersion = "2.2.0"
+
+ // iOS Steps
+
+ // CocoapodsInstallID ...
+ CocoapodsInstallID = "cocoapods-install"
+ // CocoapodsInstallVersion ...
+ CocoapodsInstallVersion = "1.5.8"
+
+ // RecreateUserSchemesID ...
+ RecreateUserSchemesID = "recreate-user-schemes"
+ // RecreateUserSchemesVersion ...
+ RecreateUserSchemesVersion = "0.9.4"
+
+ // XcodeArchiveID ...
+ XcodeArchiveID = "xcode-archive"
+ // XcodeArchiveVersion ...
+ XcodeArchiveVersion = "2.0.4"
+
+ // XcodeTestID ...
+ XcodeTestID = "xcode-test"
+ // XcodeTestVersion ...
+ XcodeTestVersion = "1.18.1"
+
+ // Xamarin Steps
+
+ // XamarinUserManagementID ...
+ XamarinUserManagementID = "xamarin-user-management"
+ // XamarinUserManagementVersion ...
+ XamarinUserManagementVersion = "1.0.3"
+
+ // NugetRestoreID ...
+ NugetRestoreID = "nuget-restore"
+ // NugetRestoreVersion ...
+ NugetRestoreVersion = "1.0.3"
+
+ // XamarinComponentsRestoreID ...
+ XamarinComponentsRestoreID = "xamarin-components-restore"
+ // XamarinComponentsRestoreVersion ...
+ XamarinComponentsRestoreVersion = "0.9.0"
+
+ // XamarinArchiveID ...
+ XamarinArchiveID = "xamarin-archive"
+ // XamarinArchiveVersion ...
+ XamarinArchiveVersion = "1.1.1"
+
+ // macOS Steps
+
+ // XcodeArchiveMacID ...
+ XcodeArchiveMacID = "xcode-archive-mac"
+ // XcodeArchiveMacVersion ...
+ XcodeArchiveMacVersion = "1.3.2"
+
+ // XcodeTestMacID ...
+ XcodeTestMacID = "xcode-test-mac"
+ // XcodeTestMacVersion ...
+ XcodeTestMacVersion = "1.0.5"
)
-// TemplateScriptStepTitiel ...
-const TemplateScriptStepTitiel = "Do anything with Script step"
-
func stepIDComposite(ID, version string) string {
return ID + "@" + version
}
@@ -89,32 +135,32 @@ func stepListItem(stepIDComposite, title, runIf string, inputs []envman.Environm
// ActivateSSHKeyStepListItem ...
func ActivateSSHKeyStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(activateSSHKeyID, activateSSHKeyVersion)
+ stepIDComposite := stepIDComposite(ActivateSSHKeyID, ActivateSSHKeyVersion)
runIf := `{{getenv "SSH_RSA_PRIVATE_KEY" | ne ""}}`
return stepListItem(stepIDComposite, "", runIf, nil)
}
// GitCloneStepListItem ...
func GitCloneStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(gitCloneID, gitCloneVersion)
+ stepIDComposite := stepIDComposite(GitCloneID, GitCloneVersion)
return stepListItem(stepIDComposite, "", "", nil)
}
// CertificateAndProfileInstallerStepListItem ...
func CertificateAndProfileInstallerStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(certificateAndProfileInstallerID, certificateAndProfileInstallerVersion)
+ stepIDComposite := stepIDComposite(CertificateAndProfileInstallerID, CertificateAndProfileInstallerVersion)
return stepListItem(stepIDComposite, "", "", nil)
}
// DeployToBitriseIoStepListItem ...
func DeployToBitriseIoStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(deployToBitriseIoID, deployToBitriseIoVersion)
+ stepIDComposite := stepIDComposite(DeployToBitriseIoID, DeployToBitriseIoVersion)
return stepListItem(stepIDComposite, "", "", nil)
}
// ScriptSteplistItem ...
func ScriptSteplistItem(title string, inputs ...envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(scriptID, scriptVersion)
+ stepIDComposite := stepIDComposite(ScriptID, ScriptVersion)
return stepListItem(stepIDComposite, title, "", inputs)
}
@@ -124,7 +170,7 @@ func ScriptSteplistItem(title string, inputs ...envman.EnvironmentItemModel) bit
// GradleRunnerStepListItem ...
func GradleRunnerStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(gradleRunnerID, gradleRunnerVersion)
+ stepIDComposite := stepIDComposite(GradleRunnerID, GradleRunnerVersion)
return stepListItem(stepIDComposite, "", "", inputs)
}
@@ -134,7 +180,7 @@ func GradleRunnerStepListItem(inputs []envman.EnvironmentItemModel) bitrise.Step
// FastlaneStepListItem ...
func FastlaneStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(fastlaneID, fastlaneVersion)
+ stepIDComposite := stepIDComposite(FastlaneID, FastlaneVersion)
return stepListItem(stepIDComposite, "", "", inputs)
}
@@ -144,25 +190,25 @@ func FastlaneStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepList
// CocoapodsInstallStepListItem ...
func CocoapodsInstallStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(cocoapodsInstallID, cocoapodsInstallVersion)
+ stepIDComposite := stepIDComposite(CocoapodsInstallID, CocoapodsInstallVersion)
return stepListItem(stepIDComposite, "", "", nil)
}
// RecreateUserSchemesStepListItem ...
func RecreateUserSchemesStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(recreateUserSchemesID, recreateUserSchemesVersion)
+ stepIDComposite := stepIDComposite(RecreateUserSchemesID, RecreateUserSchemesVersion)
return stepListItem(stepIDComposite, "", "", inputs)
}
// XcodeArchiveStepListItem ...
func XcodeArchiveStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(xcodeArchiveID, xcodeArchiveVersion)
+ stepIDComposite := stepIDComposite(XcodeArchiveID, XcodeArchiveVersion)
return stepListItem(stepIDComposite, "", "", inputs)
}
// XcodeTestStepListItem ...
func XcodeTestStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(xcodeTestID, xcodeTestVersion)
+ stepIDComposite := stepIDComposite(XcodeTestID, XcodeTestVersion)
return stepListItem(stepIDComposite, "", "", inputs)
}
@@ -172,25 +218,41 @@ func XcodeTestStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepLis
// XamarinUserManagementStepListItem ...
func XamarinUserManagementStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(xamarinUserManagementID, xamarinUserManagementVersion)
+ stepIDComposite := stepIDComposite(XamarinUserManagementID, XamarinUserManagementVersion)
runIf := ".IsCI"
return stepListItem(stepIDComposite, "", runIf, inputs)
}
// NugetRestoreStepListItem ...
func NugetRestoreStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(nugetRestoreID, nugetRestoreVersion)
+ stepIDComposite := stepIDComposite(NugetRestoreID, NugetRestoreVersion)
return stepListItem(stepIDComposite, "", "", nil)
}
// XamarinComponentsRestoreStepListItem ...
func XamarinComponentsRestoreStepListItem() bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(xamarinComponentsRestoreID, xamarinComponentsRestoreVersion)
+ stepIDComposite := stepIDComposite(XamarinComponentsRestoreID, XamarinComponentsRestoreVersion)
return stepListItem(stepIDComposite, "", "", nil)
}
// XamarinArchiveStepListItem ...
func XamarinArchiveStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
- stepIDComposite := stepIDComposite(xamarinArchiveID, xamarinArchiveVersion)
+ stepIDComposite := stepIDComposite(XamarinArchiveID, XamarinArchiveVersion)
+ return stepListItem(stepIDComposite, "", "", inputs)
+}
+
+//------------------------
+// macOS Step List Items
+//------------------------
+
+// XcodeArchiveMacStepListItem ...
+func XcodeArchiveMacStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
+ stepIDComposite := stepIDComposite(XcodeArchiveMacID, XcodeArchiveMacVersion)
+ return stepListItem(stepIDComposite, "", "", inputs)
+}
+
+// XcodeTestMacStepListItem ...
+func XcodeTestMacStepListItem(inputs []envman.EnvironmentItemModel) bitrise.StepListItemModel {
+ stepIDComposite := stepIDComposite(XcodeTestMacID, XcodeTestMacVersion)
return stepListItem(stepIDComposite, "", "", inputs)
}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/utility/podfile.go b/go/src/github.com/bitrise-core/bitrise-init/utility/podfile.go
new file mode 100644
index 00000000..89aa73b2
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/utility/podfile.go
@@ -0,0 +1,73 @@
+package utility
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/bitrise-io/go-utils/fileutil"
+)
+
+const getWorkspacePathGemfileContent = `source 'https://rubygems.org'
+gem 'cocoapods-core'
+`
+
+const getWorkspacePathRubyScriptContent = `require 'cocoapods-core'
+podfile_path = ENV['PODFILE_PATH']
+podfile = Pod::Podfile.from_file(podfile_path)
+puts podfile.workspace_path
+`
+
+// GetWorkspaceProjectMap ...
+func GetWorkspaceProjectMap(podfilePth string) (map[string]string, error) {
+ // fix podfile quotation
+ podfileContent, err := fileutil.ReadStringFromFile(podfilePth)
+ if err != nil {
+ return map[string]string{}, err
+ }
+
+ podfileContent = strings.Replace(podfileContent, `‘`, `'`, -1)
+ podfileContent = strings.Replace(podfileContent, `’`, `'`, -1)
+ podfileContent = strings.Replace(podfileContent, `“`, `"`, -1)
+ podfileContent = strings.Replace(podfileContent, `”`, `"`, -1)
+
+ if err := fileutil.WriteStringToFile(podfilePth, podfileContent); err != nil {
+ return map[string]string{}, err
+ }
+ // ----
+
+ envs := []string{fmt.Sprintf("PODFILE_PATH=%s", podfilePth)}
+ podfileDir := filepath.Dir(podfilePth)
+
+ workspaceBase, err := runRubyScriptForOutput(getWorkspacePathRubyScriptContent, getWorkspacePathGemfileContent, podfileDir, envs)
+ if err != nil {
+ return map[string]string{}, err
+ }
+
+ pattern := filepath.Join(podfileDir, "*.xcodeproj")
+ projects, err := filepath.Glob(pattern)
+ if err != nil {
+ return map[string]string{}, err
+ }
+
+ if len(projects) > 1 {
+ return map[string]string{}, fmt.Errorf("more than 1 xcodeproj exists in Podfile's dir")
+ } else if len(projects) == 0 {
+ return map[string]string{}, fmt.Errorf("no xcodeproj exist in Podfile's dir")
+ }
+
+ project := projects[0]
+ workspace := ""
+
+ if workspaceBase != "" {
+ workspace = filepath.Join(podfileDir, workspaceBase)
+ } else {
+ projectBasename := filepath.Base(project)
+ projectName := strings.TrimSuffix(projectBasename, ".xcodeproj")
+ workspace = filepath.Join(podfileDir, projectName+".xcworkspace")
+ }
+
+ return map[string]string{
+ workspace: project,
+ }, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_util_test.go b/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_test.go
similarity index 83%
rename from go/src/github.com/bitrise-core/bitrise-init/utility/podfile_util_test.go
rename to go/src/github.com/bitrise-core/bitrise-init/utility/podfile_test.go
index adcb8323..8a01da9b 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_util_test.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_test.go
@@ -12,43 +12,6 @@ import (
"github.com/stretchr/testify/require"
)
-func TestIsWorkspaceSpecified(t *testing.T) {
- t.Log("podfile - no workspace defined")
- {
- podfile := `platform :ios, '9.0'
-pod 'Alamofire', '~> 3.4'
-`
- require.Equal(t, false, isWorkspaceSpecified(podfile))
- }
-
- t.Log("podfile - workspace defined")
- {
- podfile := `platform :ios, '9.0'
-pod 'Alamofire', '~> 3.4'
-workspace 'MyWorkspace'
-`
- require.Equal(t, true, isWorkspaceSpecified(podfile))
- }
-
- t.Log("podfile - workspace defined with whitespace")
- {
- podfile := `platform :ios, '9.0'
-pod 'Alamofire', '~> 3.4'
- workspace 'MyWorkspace'
-`
- require.Equal(t, true, isWorkspaceSpecified(podfile))
- }
-
- t.Log("podfile - workspace defined with tab")
- {
- podfile := `platform :ios, '9.0'
-pod 'Alamofire', '~> 3.4'
- workspace 'MyWorkspace'
-`
- require.Equal(t, true, isWorkspaceSpecified(podfile))
- }
-}
-
func TestGetWorkspaceProjectMap(t *testing.T) {
// ---------------------
// No workspace defined
@@ -63,7 +26,7 @@ pod 'Alamofire', '~> 3.4'
podfilePth := filepath.Join(tmpDir, "Podfile")
require.NoError(t, fileutil.WriteStringToFile(podfilePth, podfile))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.Error(t, err)
require.Equal(t, 0, len(workspaceProjectMap))
@@ -85,7 +48,7 @@ pod 'Alamofire', '~> 3.4'
projectPth := filepath.Join(tmpDir, "project.xcodeproj")
require.NoError(t, fileutil.WriteStringToFile(projectPth, project))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.NoError(t, err)
require.Equal(t, 1, len(workspaceProjectMap))
@@ -122,7 +85,7 @@ pod 'Alamofire', '~> 3.4'
project2Pth := filepath.Join(tmpDir, "project2.xcodeproj")
require.NoError(t, fileutil.WriteStringToFile(project2Pth, project2))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.Error(t, err)
require.Equal(t, 0, len(workspaceProjectMap))
@@ -143,7 +106,7 @@ workspace 'MyWorkspace'
podfilePth := filepath.Join(tmpDir, "Podfile")
require.NoError(t, fileutil.WriteStringToFile(podfilePth, podfile))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.Error(t, err)
require.Equal(t, 0, len(workspaceProjectMap))
@@ -166,7 +129,7 @@ workspace 'MyWorkspace'
projectPth := filepath.Join(tmpDir, "project.xcodeproj")
require.NoError(t, fileutil.WriteStringToFile(projectPth, project))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.NoError(t, err)
require.Equal(t, 1, len(workspaceProjectMap))
@@ -204,7 +167,7 @@ workspace 'MyWorkspace'
project2Pth := filepath.Join(tmpDir, "project2.xcodeproj")
require.NoError(t, fileutil.WriteStringToFile(project2Pth, project2))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.Error(t, err)
require.Equal(t, 0, len(workspaceProjectMap))
@@ -227,7 +190,7 @@ workspace ‘MyWorkspace’
projectPth := filepath.Join(tmpDir, "project.xcodeproj")
require.NoError(t, fileutil.WriteStringToFile(projectPth, project))
- workspaceProjectMap, err := getWorkspaceProjectMap(podfilePth)
+ workspaceProjectMap, err := GetWorkspaceProjectMap(podfilePth)
require.NoError(t, err)
require.Equal(t, 1, len(workspaceProjectMap))
diff --git a/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_util.go b/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_util.go
deleted file mode 100644
index 713450ef..00000000
--- a/go/src/github.com/bitrise-core/bitrise-init/utility/podfile_util.go
+++ /dev/null
@@ -1,321 +0,0 @@
-package utility
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "os"
- "path"
- "path/filepath"
- "regexp"
- "strings"
-
- "github.com/bitrise-io/go-utils/cmdex"
- "github.com/bitrise-io/go-utils/errorutil"
- "github.com/bitrise-io/go-utils/fileutil"
- "github.com/bitrise-io/go-utils/pathutil"
-)
-
-const podfileRubyFileContent = `class Podfile
- def method_missing symbol, *args
- end
-
- def Object.const_missing const, *args
- end
-
- def uninitialized_constant constant, *args
- puts "Unitialized Constant: #{constant}"
- puts args
- end
-
- def apply_quotation_fix(str)
- fixed = str.tr('‘', "'")
- fixed = fixed.tr('’', "'")
- fixed = fixed.tr('“', '"')
- fixed = fixed.tr('”', '"')
- return fixed
- end
-
- def self.from_file(path)
- Podfile.new do
- @full_path = File.expand_path(path)
- @base_dir = File.dirname(@full_path)
-
- original = File.open(@full_path).read
- fixed = apply_quotation_fix(original)
-
- eval(fixed, nil, path)
- end
- end
-
- def initialize(&block)
- @dependencies = []
- instance_eval(&block)
- end
-
- def target(target_name, *args, &block)
- target_dict = {
- target: target_name,
- project: nil,
- workspace: nil,
- targets: []
- }
-
- parent_target = @current_target
- @current_target = target_dict
-
- block.call(self) if block
-
- if parent_target
- parent_target[:targets] << @current_target
- else
- (@targets ||= []) << @current_target
- end
- @current_target = parent_target
- end
-
- def project(project, *args)
- project = File.join(File.dirname(project), File.basename(project, File.extname(project)))
-
- if @current_target
- @current_target[:project] = project
- else
- @base_project = project
- end
- end
-
- def xcodeproj(project, *args)
- project(project, args)
- end
-
- def workspace(workspace, *args)
- workspace = File.join(File.dirname(workspace), File.basename(workspace, File.extname(workspace)))
-
- if @current_target
- @current_target.workspace = workspace
- else
- @base_workspace = workspace
- end
- end
-
- # Helper
-
- def fix_targets(dict, parent)
- # If no explicit project is specified, it will use the Xcode project of the parent target.
- if parent != nil
- dict[:project] = parent[:project] unless dict[:project]
- end
-
- if dict[:project] == nil
- # If none of the target definitions specify an explicit project and there is only one project in the same directory
- # as the Podfile then that project will be used.
- projects = Dir[File.join(@base_dir, "*.xcodeproj")]
-
- if projects.count == 0
- dict[:error] = "No project found for Podfile at path: #{@base_dir}"
- else
- if projects.count > 1
- dict[:error] = "Multiple projects found for Podfile at path: #{@base_dir}. Check this reference for help: https://guides.cocoapods.org/syntax/podfile.html#xcodeproj"
- end
-
- dict[:project] = File.basename(projects.first, ".*")
- end
- end
-
- if dict[:project] != nil
- # Check if the file exists
- project_path = File.join(@base_dir, "#{dict[:project]}.xcodeproj")
- unless File.exists?(project_path)
- dict[:error] = "No project found at path: #{project_path}"
- end
-
- # If no explicit Xcode workspace is specified and only one project exists in the same directory as the Podfile,
- # then the name of that project is used as the workspace’s name.
- if dict[:workspace] == nil
- if dict[:project] != nil
- dict[:workspace] = File.basename(dict[:project], '.*')
- end
- end
- end
-
- dict[:targets].each do |t|
- fix_targets(t, dict)
- end
-
- dict
- end
-
- def list_targets
- base_target = {
- target: "",
- project: @base_project,
- workspace: @base_workspace,
- targets: @targets || []
- }
-
- [fix_targets(base_target, nil)]
- end
-
- def get_workspaces(dict)
- @workspaces ||= {}
-
- if dict[:error] == nil
- project_path = File.expand_path(File.join(@base_dir, "#{dict[:project]}.xcodeproj"))
- dir = File.dirname(project_path)
- workspace_path = File.join(dir, "#{dict[:workspace]}.xcworkspace")
-
- @workspaces[workspace_path] = project_path
- else
- puts dict[:error].to_s
- exit(1)
- end
-
- dict[:targets].each do |target|
- get_workspaces(target)
- end
-
- @workspaces
- end
-end
-`
-
-const getWorkspacesRubyFileContent = `require_relative 'podfile'
-require 'json'
-
-path = ENV['pod_file_path']
-
-begin
- podfile = Podfile.from_file(path)
- workspaces = podfile.get_workspaces(podfile.list_targets.first)
-rescue => ex
- puts(ex.inspect.to_s)
- puts('--- Stack trace: ---')
- puts(ex.backtrace.to_s)
- exit(1)
-end
-
-puts workspaces.to_json
-`
-
-var (
- log = NewLogger()
-)
-
-func isWorkspaceSpecified(podfileContent string) bool {
- re := regexp.MustCompile(`\s*workspace (.+)`)
- lines := strings.Split(podfileContent, "\n")
- for _, line := range lines {
- if re.FindString(line) != "" {
- return true
- }
- }
-
- return false
-}
-
-func getWorkspaceProjectMap(podfilePth string) (map[string]string, error) {
- // Run simply Podfile anaylzer
- podfileContent, err := fileutil.ReadStringFromFile(podfilePth)
- if err != nil {
- return map[string]string{}, err
- }
-
- if !isWorkspaceSpecified(podfileContent) {
- log.Details("workspace not specified in podfile")
- // If no explicit Xcode workspace is specified and
- // only one project exists in the same directory as the Podfile,
- // then the name of that project is used as the workspace’s name.
- podfileDir := filepath.Dir(podfilePth)
- pattern := filepath.Join(podfileDir, "*.xcodeproj")
- projects, err := filepath.Glob(pattern)
- if err != nil {
- return map[string]string{}, err
- }
-
- if len(projects) > 1 {
- return map[string]string{}, fmt.Errorf("failed to determin workspace name: no workspace specified in the Podfile and more then one xcodeproj exist in Podfile's dir")
- }
-
- if len(projects) == 1 {
- project := projects[0]
- projectBasename := filepath.Base(project)
- projectName := strings.TrimSuffix(projectBasename, ".xcodeproj")
- workspace := filepath.Join(podfileDir, projectName+".xcworkspace")
-
- return map[string]string{
- workspace: project,
- }, nil
- }
- }
-
- log.Warn("Workspace specified in podfile (%s)", podfilePth)
- log.Warn("Running extended podfile analyzer")
-
- // Analyze Podfile as a ruby file
- if err := os.Setenv("pod_file_path", podfilePth); err != nil {
- return map[string]string{}, err
- }
-
- tmpDir, err := pathutil.NormalizedOSTempDirPath("bitrise-init")
- if err != nil {
- return map[string]string{}, err
- }
-
- podfileRubyFilePath := path.Join(tmpDir, "podfile.rb")
- if err := fileutil.WriteStringToFile(podfileRubyFilePath, podfileRubyFileContent); err != nil {
- return map[string]string{}, err
- }
-
- getWorkspacesRubyFilePath := path.Join(tmpDir, "get_workspace.rb")
- if err := fileutil.WriteStringToFile(getWorkspacesRubyFilePath, getWorkspacesRubyFileContent); err != nil {
- return map[string]string{}, err
- }
-
- out, err := cmdex.RunCommandAndReturnCombinedStdoutAndStderr("ruby", getWorkspacesRubyFilePath)
- if err != nil {
- log.Warn("Extended analyzer failed against podfile:")
- fmt.Println(podfileContent)
-
- if errorutil.IsExitStatusError(err) {
- return map[string]string{}, errors.New(out)
- }
- return map[string]string{}, err
- }
-
- workspaceMap := map[string]string{}
- if err := json.Unmarshal([]byte(out), &workspaceMap); err != nil {
- return map[string]string{}, err
- }
-
- return workspaceMap, nil
-}
-
-// GetRelativeWorkspaceProjectPathMap ...
-func GetRelativeWorkspaceProjectPathMap(podfilePth, baseDir string) (map[string]string, error) {
- absPodfilePth, err := pathutil.AbsPath(podfilePth)
- if err != nil {
- return map[string]string{}, err
- }
-
- workspaceMap, err := getWorkspaceProjectMap(absPodfilePth)
- if err != nil {
- return map[string]string{}, err
- }
-
- normalizedWorkspaceMap := map[string]string{}
- for workspace, project := range workspaceMap {
- relWorkspacePath, err := filepath.Rel(baseDir, workspace)
- if err != nil {
- return map[string]string{}, err
- }
-
- relProjectPath, err := filepath.Rel(baseDir, project)
- if err != nil {
- return map[string]string{}, err
- }
-
- normalizedWorkspaceMap[relWorkspacePath] = relProjectPath
- }
-
- return normalizedWorkspaceMap, nil
-}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/utility/rubyscript.go b/go/src/github.com/bitrise-core/bitrise-init/utility/rubyscript.go
new file mode 100644
index 00000000..c21c562f
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/utility/rubyscript.go
@@ -0,0 +1,74 @@
+package utility
+
+import (
+ "errors"
+ "path"
+
+ "github.com/bitrise-io/go-utils/cmdex"
+ "github.com/bitrise-io/go-utils/errorutil"
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+)
+
+func runRubyScriptForOutput(scriptContent, gemfileContent, inDir string, withEnvs []string) (string, error) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__bitrise-init__")
+ if err != nil {
+ return "", err
+ }
+
+ // Write Gemfile to file and install
+ if gemfileContent != "" {
+ gemfilePth := path.Join(tmpDir, "Gemfile")
+ if err := fileutil.WriteStringToFile(gemfilePth, gemfileContent); err != nil {
+ return "", err
+ }
+
+ cmd := cmdex.NewCommand("bundle", "install")
+
+ if inDir != "" {
+ cmd.SetDir(inDir)
+ }
+
+ withEnvs = append(withEnvs, "BUNDLE_GEMFILE="+gemfilePth)
+ cmd.AppendEnvs(withEnvs)
+
+ if out, err := cmd.RunAndReturnTrimmedCombinedOutput(); err != nil {
+ if errorutil.IsExitStatusError(err) {
+ return "", errors.New(out)
+ }
+ return "", err
+ }
+ }
+
+ // Write script to file and run
+ rubyScriptPth := path.Join(tmpDir, "script.rb")
+ if err := fileutil.WriteStringToFile(rubyScriptPth, scriptContent); err != nil {
+ return "", err
+ }
+
+ var cmd *cmdex.CommandModel
+
+ if gemfileContent != "" {
+ cmd = cmdex.NewCommand("bundle", "exec", "ruby", rubyScriptPth)
+ } else {
+ cmd = cmdex.NewCommand("ruby", rubyScriptPth)
+ }
+
+ if inDir != "" {
+ cmd.SetDir(inDir)
+ }
+
+ if len(withEnvs) > 0 {
+ cmd.AppendEnvs(withEnvs)
+ }
+
+ out, err := cmd.RunAndReturnTrimmedCombinedOutput()
+ if err != nil {
+ if errorutil.IsExitStatusError(err) {
+ return "", errors.New(out)
+ }
+ return "", err
+ }
+
+ return out, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/utility/xcodeproj.go b/go/src/github.com/bitrise-core/bitrise-init/utility/xcodeproj.go
new file mode 100644
index 00000000..c82c90ba
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/utility/xcodeproj.go
@@ -0,0 +1,153 @@
+package utility
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/bitrise-tools/go-xcode/xcodeproj"
+)
+
+var (
+ embeddedWorkspacePathRegexp = regexp.MustCompile(`.+\.xcodeproj/.+\.xcworkspace`)
+ scanProjectPathRegexpBlackList = []*regexp.Regexp{embeddedWorkspacePathRegexp}
+
+ gitFolderName = ".git"
+ podsFolderName = "Pods"
+ carthageFolderName = "Carthage"
+ scanFolderNameBlackList = []string{gitFolderName, podsFolderName, carthageFolderName}
+
+ frameworkExt = ".framework"
+ scanFolderExtBlackList = []string{frameworkExt}
+)
+
+func isPathMatchRegexp(pth string, regexp *regexp.Regexp) bool {
+ return (regexp.FindString(pth) != "")
+}
+
+func isPathContainsComponent(pth, component string) bool {
+ pathComponents := strings.Split(pth, string(filepath.Separator))
+ for _, c := range pathComponents {
+ if c == component {
+ return true
+ }
+ }
+ return false
+}
+
+func isPathContainsComponentWithExtension(pth, ext string) bool {
+ pathComponents := strings.Split(pth, string(filepath.Separator))
+ for _, c := range pathComponents {
+ e := filepath.Ext(c)
+ if e == ext {
+ return true
+ }
+ }
+ return false
+}
+
+func isDir(pth string) (bool, error) {
+ fileInf, err := os.Lstat(pth)
+ if err != nil {
+ return false, err
+ }
+ if fileInf == nil {
+ return false, errors.New("no file info available")
+ }
+ return fileInf.IsDir(), nil
+}
+
+func isRelevantProject(pth string, isTest bool) (bool, error) {
+ // xcodeproj & xcworkspace should be a dir
+ if !isTest {
+ if is, err := isDir(pth); err != nil {
+ return false, err
+ } else if !is {
+ return false, nil
+ }
+ }
+
+ for _, regexp := range scanProjectPathRegexpBlackList {
+ if isPathMatchRegexp(pth, regexp) {
+ return false, nil
+ }
+ }
+
+ for _, folderName := range scanFolderNameBlackList {
+ if isPathContainsComponent(pth, folderName) {
+ return false, nil
+ }
+ }
+
+ for _, folderExt := range scanFolderExtBlackList {
+ if isPathContainsComponentWithExtension(pth, folderExt) {
+ return false, nil
+ }
+ }
+
+ return true, nil
+}
+
+// FilterRelevantXcodeProjectFiles ...
+func FilterRelevantXcodeProjectFiles(fileList []string, isTest bool) ([]string, error) {
+ filteredFiles := FilterFilesWithExtensions(fileList, xcodeproj.XCodeProjExt, xcodeproj.XCWorkspaceExt)
+ relevantFiles := []string{}
+
+ for _, file := range filteredFiles {
+ is, err := isRelevantProject(file, isTest)
+ if err != nil {
+ return []string{}, err
+ } else if !is {
+ continue
+ }
+
+ relevantFiles = append(relevantFiles, file)
+ }
+
+ sort.Sort(ByComponents(relevantFiles))
+
+ return relevantFiles, nil
+}
+
+func isRelevantPodfile(pth string) bool {
+ basename := filepath.Base(pth)
+ if !CaseInsensitiveEquals(basename, "podfile") {
+ return false
+ }
+
+ for _, folderName := range scanFolderNameBlackList {
+ if isPathContainsComponent(pth, folderName) {
+ return false
+ }
+ }
+
+ for _, folderExt := range scanFolderExtBlackList {
+ if isPathContainsComponentWithExtension(pth, folderExt) {
+ return false
+ }
+ }
+
+ return true
+}
+
+// FilterRelevantPodFiles ...
+func FilterRelevantPodFiles(fileList []string) []string {
+ podfiles := []string{}
+
+ for _, file := range fileList {
+ if isRelevantPodfile(file) {
+ podfiles = append(podfiles, file)
+ }
+ }
+
+ if len(podfiles) == 0 {
+ return []string{}
+ }
+
+ sort.Sort(ByComponents(podfiles))
+
+ return podfiles
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios_test.go b/go/src/github.com/bitrise-core/bitrise-init/utility/xcodeproj_test.go
similarity index 96%
rename from go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios_test.go
rename to go/src/github.com/bitrise-core/bitrise-init/utility/xcodeproj_test.go
index a3602bb5..33cbf722 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/scanners/ios/ios_test.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/utility/xcodeproj_test.go
@@ -1,4 +1,4 @@
-package ios
+package utility
import (
"os"
@@ -250,7 +250,7 @@ func TestIsRelevantProject(t *testing.T) {
}
}
-func TestFilterXcodeprojectFiles(t *testing.T) {
+func TestFilterRelevantXcodeProjectFiles(t *testing.T) {
t.Log(`embedded, .git, pod, carthage, .framework, relevant project`)
{
fileList := []string{
@@ -262,7 +262,7 @@ func TestFilterXcodeprojectFiles(t *testing.T) {
"/Users/bitrise/sample-apps-ios-cocoapods/SampleAppWithCocoapods.xcodeproj",
}
- files, err := filterXcodeprojectFiles(fileList, true)
+ files, err := FilterRelevantXcodeProjectFiles(fileList, true)
require.NoError(t, err)
require.Equal(t, 1, len(files))
require.Equal(t, "/Users/bitrise/sample-apps-ios-cocoapods/SampleAppWithCocoapods.xcodeproj", files[0])
@@ -279,7 +279,7 @@ func TestFilterXcodeprojectFiles(t *testing.T) {
"SampleAppWithCocoapods.xcodeproj",
}
- files, err := filterXcodeprojectFiles(fileList, true)
+ files, err := FilterRelevantXcodeProjectFiles(fileList, true)
require.NoError(t, err)
require.Equal(t, 1, len(files))
require.Equal(t, "SampleAppWithCocoapods.xcodeproj", files[0])
@@ -292,7 +292,7 @@ func TestFilterXcodeprojectFiles(t *testing.T) {
"/Users/bitrise/sample-apps-ios-cocoapods/SampleAppWithCocoapods.xcworkspace",
}
- files, err := filterXcodeprojectFiles(fileList, true)
+ files, err := FilterRelevantXcodeProjectFiles(fileList, true)
require.NoError(t, err)
require.Equal(t, 2, len(files))
@@ -308,7 +308,7 @@ func TestFilterXcodeprojectFiles(t *testing.T) {
"build.gradle",
}
- files, err := filterXcodeprojectFiles(fileList, true)
+ files, err := FilterRelevantXcodeProjectFiles(fileList, true)
require.NoError(t, err)
require.Equal(t, 0, len(files))
}
@@ -341,7 +341,7 @@ func TestIsRelevantPodfile(t *testing.T) {
}
}
-func TestFilterPodFiles(t *testing.T) {
+func TestFilterRelevantPodFiles(t *testing.T) {
t.Log(`Contains "Podfile" files`)
{
fileList := []string{
@@ -351,7 +351,7 @@ func TestFilterPodFiles(t *testing.T) {
"path/to/my/Podfile.lock",
}
- files := filterPodFiles(fileList)
+ files := FilterRelevantPodFiles(fileList)
require.Equal(t, 2, len(files))
// Also sorts "Podfile" files by path components length
@@ -366,7 +366,7 @@ func TestFilterPodFiles(t *testing.T) {
"path/to/my/gradle",
}
- files := filterPodFiles(fileList)
+ files := FilterRelevantPodFiles(fileList)
require.Equal(t, 0, len(files))
}
}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models.go
index ff6638db..73636126 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models.go
@@ -20,7 +20,7 @@ const (
StepRunStatusCodeSkippedWithRunIf = 4
// Version ...
- Version = "1.3.0"
+ Version = "1.3.1"
)
// StepListItemModel ...
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models_methods.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models_methods.go
index 9958bb24..530587ad 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models_methods.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/bitrise/models/models_methods.go
@@ -523,6 +523,14 @@ func MergeEnvironmentWith(env *envmanModels.EnvironmentItemModel, otherEnv envma
if err != nil {
return err
}
+
+ if otherOptions.IsExpand != nil {
+ options.IsExpand = pointers.NewBoolPtr(*otherOptions.IsExpand)
+ }
+ if otherOptions.SkipIfEmpty != nil {
+ options.SkipIfEmpty = pointers.NewBoolPtr(*otherOptions.SkipIfEmpty)
+ }
+
if otherOptions.Title != nil {
options.Title = pointers.NewStringPtr(*otherOptions.Title)
}
@@ -532,15 +540,15 @@ func MergeEnvironmentWith(env *envmanModels.EnvironmentItemModel, otherEnv envma
if otherOptions.Summary != nil {
options.Summary = pointers.NewStringPtr(*otherOptions.Summary)
}
+ if otherOptions.Category != nil {
+ options.Category = pointers.NewStringPtr(*otherOptions.Category)
+ }
if len(otherOptions.ValueOptions) > 0 {
options.ValueOptions = otherOptions.ValueOptions
}
if otherOptions.IsRequired != nil {
options.IsRequired = pointers.NewBoolPtr(*otherOptions.IsRequired)
}
- if otherOptions.IsExpand != nil {
- options.IsExpand = pointers.NewBoolPtr(*otherOptions.IsExpand)
- }
if otherOptions.IsDontChangeValue != nil {
options.IsDontChangeValue = pointers.NewBoolPtr(*otherOptions.IsDontChangeValue)
}
@@ -584,12 +592,13 @@ func MergeStepWith(step, otherStep stepmanModels.StepModel) (stepmanModels.StepM
if otherStep.Title != nil {
step.Title = pointers.NewStringPtr(*otherStep.Title)
}
- if otherStep.Description != nil {
- step.Description = pointers.NewStringPtr(*otherStep.Description)
- }
if otherStep.Summary != nil {
step.Summary = pointers.NewStringPtr(*otherStep.Summary)
}
+ if otherStep.Description != nil {
+ step.Description = pointers.NewStringPtr(*otherStep.Description)
+ }
+
if otherStep.Website != nil {
step.Website = pointers.NewStringPtr(*otherStep.Website)
}
@@ -599,21 +608,24 @@ func MergeStepWith(step, otherStep stepmanModels.StepModel) (stepmanModels.StepM
if otherStep.SupportURL != nil {
step.SupportURL = pointers.NewStringPtr(*otherStep.SupportURL)
}
+
if otherStep.PublishedAt != nil {
step.PublishedAt = pointers.NewTimePtr(*otherStep.PublishedAt)
}
- if otherStep.Source.Git != "" {
- step.Source.Git = otherStep.Source.Git
- }
- if otherStep.Source.Commit != "" {
- step.Source.Commit = otherStep.Source.Commit
- }
- if len(otherStep.Dependencies) > 0 {
- step.Dependencies = otherStep.Dependencies
+ if otherStep.Source != nil {
+ step.Source = new(stepmanModels.StepSourceModel)
+
+ if otherStep.Source.Git != "" {
+ step.Source.Git = otherStep.Source.Git
+ }
+ if otherStep.Source.Commit != "" {
+ step.Source.Commit = otherStep.Source.Commit
+ }
}
- if len(otherStep.Deps.Brew) > 0 || len(otherStep.Deps.AptGet) > 0 || len(otherStep.Deps.CheckOnly) > 0 {
- step.Deps = otherStep.Deps
+ if len(otherStep.AssetURLs) > 0 {
+ step.AssetURLs = otherStep.AssetURLs
}
+
if len(otherStep.HostOsTags) > 0 {
step.HostOsTags = otherStep.HostOsTags
}
@@ -623,9 +635,20 @@ func MergeStepWith(step, otherStep stepmanModels.StepModel) (stepmanModels.StepM
if len(otherStep.TypeTags) > 0 {
step.TypeTags = otherStep.TypeTags
}
+ if len(otherStep.Dependencies) > 0 {
+ step.Dependencies = otherStep.Dependencies
+ }
+ if otherStep.Toolkit != nil {
+ step.Toolkit = new(stepmanModels.StepToolkitModel)
+ *step.Toolkit = *otherStep.Toolkit
+ }
+ if otherStep.Deps != nil && (len(otherStep.Deps.Brew) > 0 || len(otherStep.Deps.AptGet) > 0 || len(otherStep.Deps.CheckOnly) > 0) {
+ step.Deps = otherStep.Deps
+ }
if otherStep.IsRequiresAdminUser != nil {
step.IsRequiresAdminUser = pointers.NewBoolPtr(*otherStep.IsRequiresAdminUser)
}
+
if otherStep.IsAlwaysRun != nil {
step.IsAlwaysRun = pointers.NewBoolPtr(*otherStep.IsAlwaysRun)
}
@@ -635,6 +658,9 @@ func MergeStepWith(step, otherStep stepmanModels.StepModel) (stepmanModels.StepM
if otherStep.RunIf != nil {
step.RunIf = pointers.NewStringPtr(*otherStep.RunIf)
}
+ if otherStep.Timeout != nil {
+ step.Timeout = pointers.NewIntPtr(*otherStep.Timeout)
+ }
for _, input := range step.Inputs {
key, _, err := input.GetKeyValuePair()
@@ -673,12 +699,12 @@ func MergeStepWith(step, otherStep stepmanModels.StepModel) (stepmanModels.StepM
// GetStepIDStepDataPair ...
func GetStepIDStepDataPair(stepListItem StepListItemModel) (string, stepmanModels.StepModel, error) {
if len(stepListItem) > 1 {
- return "", stepmanModels.StepModel{}, errors.New("StepListItem contains more than 1 key-value pair!")
+ return "", stepmanModels.StepModel{}, errors.New("StepListItem contains more than 1 key-value pair")
}
for key, value := range stepListItem {
return key, value, nil
}
- return "", stepmanModels.StepModel{}, errors.New("StepListItem does not contain a key-value pair!")
+ return "", stepmanModels.StepModel{}, errors.New("StepListItem does not contain a key-value pair")
}
// CreateStepIDDataFromString ...
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models.go
index 22ecdb4b..1dcc6db9 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models.go
@@ -9,6 +9,7 @@ type EnvironmentItemOptionsModel struct {
Title *string `json:"title,omitempty" yaml:"title,omitempty"`
Description *string `json:"description,omitempty" yaml:"description,omitempty"`
Summary *string `json:"summary,omitempty" yaml:"summary,omitempty"`
+ Category *string `json:"category,omitempty" yaml:"category,omitempty"`
ValueOptions []string `json:"value_options,omitempty" yaml:"value_options,omitempty"`
IsRequired *bool `json:"is_required,omitempty" yaml:"is_required,omitempty"`
IsDontChangeValue *bool `json:"is_dont_change_value,omitempty" yaml:"is_dont_change_value,omitempty"`
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models_methods.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models_methods.go
index 218da8cb..9c7c70de 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models_methods.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/envman/models/models_methods.go
@@ -4,8 +4,8 @@ import (
"encoding/json"
"errors"
"fmt"
+ "sort"
- log "github.com/Sirupsen/logrus"
"github.com/bitrise-io/go-utils/parseutil"
"github.com/bitrise-io/go-utils/pointers"
)
@@ -39,47 +39,66 @@ func NewEnvJSONList(jsonStr string) (EnvsJSONListModel, error) {
// GetKeyValuePair ...
func (env EnvironmentItemModel) GetKeyValuePair() (string, string, error) {
- if len(env) > 2 {
- return "", "", fmt.Errorf("Invalid env: more than 2 fields: %#v", env)
- }
-
- retKey := ""
- retValue := ""
+ // Collect keys and values
+ keys := []string{}
+ values := []interface{}{}
for key, value := range env {
- if key != OptionsKey {
- if retKey != "" {
- return "", "", fmt.Errorf("Invalid env: more than 1 key-value field found: %#v", env)
- }
+ keys = append(keys, key)
+ values = append(values, value)
+ }
- valueStr, ok := value.(string)
- if !ok {
- if value == nil {
- valueStr = ""
- } else {
- valueStr = parseutil.CastToString(value)
- if valueStr == "" {
- return "", "", fmt.Errorf("Invalid value, not a string (key:%#v) (value:%#v)", key, value)
- }
- }
- }
+ if len(keys) == 0 {
+ return "", "", errors.New("no environment key specified")
+ } else if len(keys) > 2 {
+ sort.Strings(keys)
+ return "", "", fmt.Errorf("more than 2 keys specified: %v", keys)
+ }
+
+ // Collect env key and value
+ key := ""
+ var value interface{}
+ optionsFound := false
- retKey = key
- retValue = valueStr
+ for i := 0; i < len(keys); i++ {
+ k := keys[i]
+ if k != OptionsKey {
+ key = k
+ value = values[i]
+ } else {
+ optionsFound = true
}
}
- if retKey == "" {
- return "", "", errors.New("Invalid env: no envKey specified!")
+ if key == "" {
+ sort.Strings(keys)
+ return "", "", fmt.Errorf("no environment key found, keys: %v", keys)
+ }
+ if len(keys) > 1 && !optionsFound {
+ sort.Strings(keys)
+ return "", "", fmt.Errorf("more than 1 environment key specified: %v", keys)
}
- return retKey, retValue, nil
+ // Cast env value to string
+ valueStr := ""
+
+ if value != nil {
+ if str, ok := value.(string); ok {
+ valueStr = str
+ } else if str := parseutil.CastToString(value); str != "" {
+ valueStr = str
+ } else {
+ return "", "", fmt.Errorf("value (%#v) is not a string for key (%s)", value, key)
+ }
+ }
+
+ return key, valueStr, nil
}
// ParseFromInterfaceMap ...
func (envSerModel *EnvironmentItemOptionsModel) ParseFromInterfaceMap(input map[string]interface{}) error {
for keyStr, value := range input {
- log.Debugf(" ** processing (key:%#v) (value:%#v) (envSerModel:%#v)", keyStr, value, envSerModel)
+
switch keyStr {
case "title":
envSerModel.Title = parseutil.CastToStringPtr(value)
@@ -87,6 +106,8 @@ func (envSerModel *EnvironmentItemOptionsModel) ParseFromInterfaceMap(input map[
envSerModel.Description = parseutil.CastToStringPtr(value)
case "summary":
envSerModel.Summary = parseutil.CastToStringPtr(value)
+ case "category":
+ envSerModel.Category = parseutil.CastToStringPtr(value)
case "value_options":
castedValue, ok := value.([]string)
if !ok {
@@ -95,14 +116,14 @@ func (envSerModel *EnvironmentItemOptionsModel) ParseFromInterfaceMap(input map[
castedValue = []string{}
interfArr, ok := value.([]interface{})
if !ok {
- return fmt.Errorf("Invalid value type (key:%s): %#v", keyStr, value)
+ return fmt.Errorf("invalid value type (%#v) for key: %s", value, keyStr)
}
for _, interfItm := range interfArr {
castedItm, ok := interfItm.(string)
if !ok {
castedItm = parseutil.CastToString(interfItm)
if castedItm == "" {
- return fmt.Errorf("Invalid value in value_options (%#v), not a string: %#v", interfArr, interfItm)
+ return fmt.Errorf("not a string value (%#v) in value_options", interfItm)
}
}
castedValue = append(castedValue, castedItm)
@@ -112,35 +133,35 @@ func (envSerModel *EnvironmentItemOptionsModel) ParseFromInterfaceMap(input map[
case "is_required":
castedBoolPtr, ok := parseutil.CastToBoolPtr(value)
if !ok {
- return fmt.Errorf("Failed to parse bool value (%#v) for key (%s)", value, keyStr)
+ return fmt.Errorf("failed to parse bool value (%#v) for key (%s)", value, keyStr)
}
envSerModel.IsRequired = castedBoolPtr
case "is_expand":
castedBoolPtr, ok := parseutil.CastToBoolPtr(value)
if !ok {
- return fmt.Errorf("Failed to parse bool value (%#v) for key (%s)", value, keyStr)
+ return fmt.Errorf("failed to parse bool value (%#v) for key (%s)", value, keyStr)
}
envSerModel.IsExpand = castedBoolPtr
case "is_dont_change_value":
castedBoolPtr, ok := parseutil.CastToBoolPtr(value)
if !ok {
- return fmt.Errorf("Failed to parse bool value (%#v) for key (%s)", value, keyStr)
+ return fmt.Errorf("failed to parse bool value (%#v) for key (%s)", value, keyStr)
}
envSerModel.IsDontChangeValue = castedBoolPtr
case "is_template":
castedBoolPtr, ok := parseutil.CastToBoolPtr(value)
if !ok {
- return fmt.Errorf("Failed to parse bool value (%#v) for key (%s)", value, keyStr)
+ return fmt.Errorf("failed to parse bool value (%#v) for key (%s)", value, keyStr)
}
envSerModel.IsTemplate = castedBoolPtr
case "skip_if_empty":
castedBoolPtr, ok := parseutil.CastToBoolPtr(value)
if !ok {
- return fmt.Errorf("Failed to parse bool value (%#v) for key (%s)", value, keyStr)
+ return fmt.Errorf("failed to parse bool value (%#v) for key (%s)", value, keyStr)
}
envSerModel.SkipIfEmpty = castedBoolPtr
default:
- return fmt.Errorf("Not supported key found in options: %#v", keyStr)
+ return fmt.Errorf("not supported key found in options: %s", keyStr)
}
}
return nil
@@ -158,8 +179,6 @@ func (env EnvironmentItemModel) GetOptions() (EnvironmentItemOptionsModel, error
return envItmCasted, nil
}
- log.Debugf(" * processing env:%#v", env)
-
// if it's read from a file (YAML/JSON) then it's most likely not the proper type
// so cast it from the generic interface-interface map
normalizedOptsInterfaceMap := make(map[string]interface{})
@@ -169,7 +188,7 @@ func (env EnvironmentItemModel) GetOptions() (EnvironmentItemOptionsModel, error
for key, value := range optionsInterfaceMap {
keyStr, ok := key.(string)
if !ok {
- return EnvironmentItemOptionsModel{}, fmt.Errorf("Failed to cask Options key to String: %#v", key)
+ return EnvironmentItemOptionsModel{}, fmt.Errorf("failed to cask options key (%#v) to string", key)
}
normalizedOptsInterfaceMap[keyStr] = value
}
@@ -188,11 +207,10 @@ func (env EnvironmentItemModel) GetOptions() (EnvironmentItemOptionsModel, error
return EnvironmentItemOptionsModel{}, err
}
- log.Debugf("Parsed options: %#v\n", options)
return options, nil
}
- return EnvironmentItemOptionsModel{}, fmt.Errorf("Invalid options (value:%#v) - failed to cast", value)
+ return EnvironmentItemOptionsModel{}, fmt.Errorf("failed to cast options value: (%#v)", value)
}
// Normalize ...
@@ -220,6 +238,9 @@ func (env *EnvironmentItemModel) FillMissingDefaults() error {
if options.Summary == nil {
options.Summary = pointers.NewStringPtr("")
}
+ if options.Category == nil {
+ options.Category = pointers.NewStringPtr("")
+ }
if options.IsRequired == nil {
options.IsRequired = pointers.NewBoolPtr(DefaultIsRequired)
}
@@ -246,7 +267,7 @@ func (env EnvironmentItemModel) Validate() error {
return err
}
if key == "" {
- return errors.New("Invalid environment: empty env_key")
+ return errors.New("no environment key found")
}
_, err = env.GetOptions()
if err != nil {
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/go-utils/cmdex/run.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/go-utils/cmdex/run.go
index f9790726..5d55c68b 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/go-utils/cmdex/run.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/go-utils/cmdex/run.go
@@ -26,6 +26,12 @@ func NewCommand(name string, args ...string) *CommandModel {
}
}
+// NewCommandWithStandardOuts - same as NewCommand, but sets the command's
+// stdout and stderr to the standard (OS) out (os.Stdout) and err (os.Stderr)
+func NewCommandWithStandardOuts(name string, args ...string) *CommandModel {
+ return NewCommand(name, args...).SetStdout(os.Stdout).SetStderr(os.Stderr)
+}
+
// NewCommandFromSlice ...
func NewCommandFromSlice(cmdSlice []string) (*CommandModel, error) {
if len(cmdSlice) == 0 {
@@ -106,6 +112,11 @@ func (command CommandModel) RunAndReturnTrimmedCombinedOutput() (string, error)
return RunCmdAndReturnTrimmedCombinedOutput(command.cmd)
}
+// PrintableCommandArgs ...
+func (command CommandModel) PrintableCommandArgs() string {
+ return PrintableCommandArgs(false, command.cmd.Args)
+}
+
// ----------
// PrintableCommandArgs ...
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models.go
index be2e8026..471de0bf 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models.go
@@ -86,7 +86,7 @@ type StepModel struct {
SupportURL *string `json:"support_url,omitempty" yaml:"support_url,omitempty"`
// auto-generated at share
PublishedAt *time.Time `json:"published_at,omitempty" yaml:"published_at,omitempty"`
- Source StepSourceModel `json:"source,omitempty" yaml:"source,omitempty"`
+ Source *StepSourceModel `json:"source,omitempty" yaml:"source,omitempty"`
AssetURLs map[string]string `json:"asset_urls,omitempty" yaml:"asset_urls,omitempty"`
//
HostOsTags []string `json:"host_os_tags,omitempty" yaml:"host_os_tags,omitempty"`
@@ -94,7 +94,7 @@ type StepModel struct {
TypeTags []string `json:"type_tags,omitempty" yaml:"type_tags,omitempty"`
Dependencies []DependencyModel `json:"dependencies,omitempty" yaml:"dependencies,omitempty"`
Toolkit *StepToolkitModel `json:"toolkit,omitempty" yaml:"toolkit,omitempty"`
- Deps DepsModel `json:"deps,omitempty" yaml:"deps,omitempty"`
+ Deps *DepsModel `json:"deps,omitempty" yaml:"deps,omitempty"`
IsRequiresAdminUser *bool `json:"is_requires_admin_user,omitempty" yaml:"is_requires_admin_user,omitempty"`
// IsAlwaysRun : if true then this step will always run,
// even if a previous step fails.
@@ -104,7 +104,8 @@ type StepModel struct {
// steps will run which are marked with IsAlwaysRun.
IsSkippable *bool `json:"is_skippable,omitempty" yaml:"is_skippable,omitempty"`
// RunIf : only run the step if the template example evaluates to true
- RunIf *string `json:"run_if,omitempty" yaml:"run_if,omitempty"`
+ RunIf *string `json:"run_if,omitempty" yaml:"run_if,omitempty"`
+ Timeout *int `json:"timeout,omitempty" yaml:"timeout,omitempty"`
//
Inputs []envmanModels.EnvironmentItemModel `json:"inputs,omitempty" yaml:"inputs,omitempty"`
Outputs []envmanModels.EnvironmentItemModel `json:"outputs,omitempty" yaml:"outputs,omitempty"`
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models_methods.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models_methods.go
index 8f6c4420..5c3fba7b 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models_methods.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/stepman/models/models_methods.go
@@ -18,6 +18,8 @@ const (
DefaultIsRequiresAdminUser = false
// DefaultIsSkippable ...
DefaultIsSkippable = false
+ // DefaultTimeout ...
+ DefaultTimeout = 0
)
// CreateFromJSON ...
@@ -112,6 +114,10 @@ func (step StepModel) AuditBeforeShare() error {
return errors.New("Invalid step: missing or empty required 'website' property")
}
+ if step.Timeout != nil && *step.Timeout < 0 {
+ return errors.New("Invalid step: timeout less then 0")
+ }
+
if err := step.ValidateInputAndOutputEnvs(true); err != nil {
return err
}
@@ -128,6 +134,9 @@ func (step StepModel) Audit() error {
if step.PublishedAt == nil || (*step.PublishedAt).Equal(time.Time{}) {
return errors.New("Invalid step: missing or empty required 'PublishedAt' property")
}
+ if step.Source == nil {
+ return errors.New("Invalid step: missing or empty required 'Source' property")
+ }
if err := step.Source.validateSource(); err != nil {
return err
}
@@ -167,6 +176,9 @@ func (step *StepModel) FillMissingDefaults() error {
if step.RunIf == nil {
step.RunIf = pointers.NewStringPtr("")
}
+ if step.Timeout == nil {
+ step.Timeout = pointers.NewIntPtr(DefaultTimeout)
+ }
for _, input := range step.Inputs {
err := input.FillMissingDefaults()
@@ -216,6 +228,10 @@ func (collection StepCollectionModel) GetDownloadLocations(id, version string) (
return []DownloadLocationModel{}, fmt.Errorf("Collection (%s) doesn't contains step %s (%s)", collection.SteplibSource, id, version)
}
+ if step.Source == nil {
+ return []DownloadLocationModel{}, errors.New("Missing Source property")
+ }
+
locations := []DownloadLocationModel{}
for _, downloadLocation := range collection.DownloadLocations {
switch downloadLocation.Type {
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/ruby_script_contents.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/ruby_script_contents.go
deleted file mode 100644
index fb6f4184..00000000
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/ruby_script_contents.go
+++ /dev/null
@@ -1,133 +0,0 @@
-package xcodeproj
-
-const xcodeprojGemfileContent = `source 'https://rubygems.org'
-
-gem 'xcodeproj'
-`
-
-const recreateUserSchemesRubyScriptContent = `require 'xcodeproj'
-
-project_path = ENV['project_path']
-
-begin
- raise 'empty path' if project_path.empty?
-
- project = Xcodeproj::Project.open project_path
-
- #-----
- # Separate targets
- native_targets = project.native_targets
-
- build_targets = []
- test_targets = []
-
- native_targets.each do |target|
- test_targets << target if target.test_target_type?
- build_targets << target unless target.test_target_type?
- end
-
- raise 'no build target found' unless build_targets.count
-
- #-----
- # Map targets
- target_mapping = {}
-
- build_targets.each do |target|
- target_mapping[target] = []
- end
-
- test_targets.each do |target|
- target_dependencies = target.dependencies
-
- dependent_targets = []
- target_dependencies.each do |target_dependencie|
- dependent_targets << target_dependencie.target
- end
-
- dependent_targets.each do |dependent_target|
- if build_targets.include? dependent_target
- target_mapping[dependent_target] = [] unless target_mapping[dependent_target]
- target_mapping[dependent_target] << target
- end
- end
- end
-
- #-----
- # Create schemes
- target_mapping.each do |build_t, test_ts|
- scheme = Xcodeproj::XCScheme.new
-
- scheme.set_launch_target build_t
- scheme.add_build_target build_t
-
- test_ts.each do |test_t|
- scheme.add_test_target test_t
- end
-
- scheme.save_as project_path, build_t.name
- end
-rescue => ex
- puts ex.inspect.to_s
- puts '--- Stack trace: ---'
- puts ex.backtrace.to_s
- exit 1
-end
-`
-
-const projectBuildTargetTestTargetsMapRubyScriptContent = `
-require 'xcodeproj'
-require 'json'
-
-project_path = ENV['project_path']
-
-begin
- raise 'empty path' if project_path.empty?
-
- project = Xcodeproj::Project.open project_path
-
- #-----
- # Separate targets
- native_targets = project.native_targets
-
- build_targets = []
- test_targets = []
-
- native_targets.each do |target|
- test_targets << target if target.test_target_type?
- build_targets << target unless target.test_target_type?
- end
-
- raise 'no build target found' unless build_targets.count
-
- #-----
- # Map targets
- target_mapping = {}
-
- build_targets.each do |target|
- target_mapping[target.name] = []
- end
-
- test_targets.each do |target|
- target_dependencies = target.dependencies
-
- dependent_targets = []
- target_dependencies.each do |target_dependencie|
- dependent_targets << target_dependencie.target
- end
-
- dependent_targets.each do |dependent_target|
- if build_targets.include? dependent_target
- target_mapping[dependent_target.name] = [] unless target_mapping[dependent_target.name]
- target_mapping[dependent_target.name] << target.name
- end
- end
- end
-
- puts target_mapping.to_json
-rescue => ex
- puts ex.inspect.to_s
- puts '--- Stack trace: ---'
- puts ex.backtrace.to_s
- exit 1
-end
-`
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/xcodeproj.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/xcodeproj.go
deleted file mode 100644
index 76f8acf0..00000000
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/xcodeproj.go
+++ /dev/null
@@ -1,870 +0,0 @@
-package xcodeproj
-
-import (
- "bufio"
- "bytes"
- "errors"
- "fmt"
- "os"
- "path"
- "path/filepath"
- "regexp"
- "sort"
- "strings"
-
- "github.com/bitrise-io/go-utils/cmdex"
- "github.com/bitrise-io/go-utils/errorutil"
- "github.com/bitrise-io/go-utils/fileutil"
- "github.com/bitrise-io/go-utils/pathutil"
-)
-
-// Extensions
-const (
- // XCWorkspaceExt ...
- XCWorkspaceExt = ".xcworkspace"
- // XCodeProjExt ...
- XCodeProjExt = ".xcodeproj"
- // XCSchemeExt ...
- XCSchemeExt = ".xcscheme"
-)
-
-// IsXCodeProj ...
-func IsXCodeProj(pth string) bool {
- return strings.HasSuffix(pth, XCodeProjExt)
-}
-
-// IsXCWorkspace ...
-func IsXCWorkspace(pth string) bool {
- return strings.HasSuffix(pth, XCWorkspaceExt)
-}
-
-// SchemeNameFromPath ...
-func SchemeNameFromPath(schemePth string) string {
- basename := filepath.Base(schemePth)
- ext := filepath.Ext(schemePth)
- if ext != XCSchemeExt {
- return ""
- }
- return strings.TrimSuffix(basename, ext)
-}
-
-// SchemeFileContainsXCTestBuildAction ...
-func SchemeFileContainsXCTestBuildAction(schemeFilePth string) (bool, error) {
- content, err := fileutil.ReadStringFromFile(schemeFilePth)
- if err != nil {
- return false, err
- }
-
- return schemeFileContentContainsXCTestBuildAction(content)
-}
-
-// ProjectSharedSchemeFilePaths ...
-func ProjectSharedSchemeFilePaths(projectPth string) ([]string, error) {
- return sharedSchemeFilePaths(projectPth)
-}
-
-// WorkspaceSharedSchemeFilePaths ...
-func WorkspaceSharedSchemeFilePaths(workspacePth string) ([]string, error) {
- workspaceSchemeFilePaths, err := sharedSchemeFilePaths(workspacePth)
- if err != nil {
- return []string{}, err
- }
-
- projects, err := WorkspaceProjectReferences(workspacePth)
- if err != nil {
- return nil, err
- }
-
- for _, project := range projects {
- projectSchemeFilePaths, err := sharedSchemeFilePaths(project)
- if err != nil {
- return []string{}, err
- }
- workspaceSchemeFilePaths = append(workspaceSchemeFilePaths, projectSchemeFilePaths...)
- }
-
- sort.Strings(workspaceSchemeFilePaths)
-
- return workspaceSchemeFilePaths, nil
-}
-
-// ProjectSharedSchemes ...
-func ProjectSharedSchemes(projectPth string) (map[string]bool, error) {
- return sharedSchemes(projectPth)
-}
-
-// WorkspaceSharedSchemes ...
-func WorkspaceSharedSchemes(workspacePth string) (map[string]bool, error) {
- schemeMap, err := sharedSchemes(workspacePth)
- if err != nil {
- return map[string]bool{}, err
- }
-
- projects, err := WorkspaceProjectReferences(workspacePth)
- if err != nil {
- return nil, err
- }
-
- for _, project := range projects {
- projectSchemeMap, err := sharedSchemes(project)
- if err != nil {
- return map[string]bool{}, err
- }
-
- for name, hasXCtest := range projectSchemeMap {
- schemeMap[name] = hasXCtest
- }
- }
-
- return schemeMap, nil
-}
-
-// ProjectUserSchemeFilePaths ...
-func ProjectUserSchemeFilePaths(projectPth string) ([]string, error) {
- return userSchemeFilePaths(projectPth)
-}
-
-// WorkspaceUserSchemeFilePaths ...
-func WorkspaceUserSchemeFilePaths(workspacePth string) ([]string, error) {
- workspaceSchemeFilePaths, err := userSchemeFilePaths(workspacePth)
- if err != nil {
- return []string{}, err
- }
-
- projects, err := WorkspaceProjectReferences(workspacePth)
- if err != nil {
- return nil, err
- }
-
- for _, project := range projects {
- projectSchemeFilePaths, err := userSchemeFilePaths(project)
- if err != nil {
- return []string{}, err
- }
- workspaceSchemeFilePaths = append(workspaceSchemeFilePaths, projectSchemeFilePaths...)
- }
-
- sort.Strings(workspaceSchemeFilePaths)
-
- return workspaceSchemeFilePaths, nil
-}
-
-// ProjectUserSchemes ...
-func ProjectUserSchemes(projectPth string) (map[string]bool, error) {
- return userSchemes(projectPth)
-}
-
-// WorkspaceUserSchemes ...
-func WorkspaceUserSchemes(workspacePth string) (map[string]bool, error) {
- schemeMap, err := userSchemes(workspacePth)
- if err != nil {
- return map[string]bool{}, err
- }
-
- projects, err := WorkspaceProjectReferences(workspacePth)
- if err != nil {
- return nil, err
- }
-
- for _, project := range projects {
- projectSchemeMap, err := userSchemes(project)
- if err != nil {
- return map[string]bool{}, err
- }
-
- for name, hasXCtest := range projectSchemeMap {
- schemeMap[name] = hasXCtest
- }
- }
-
- return schemeMap, nil
-}
-
-func runRubyScriptForOutput(scriptContent, gemfileContent, inDir string, withEnvs []string) (string, error) {
- tmpDir, err := pathutil.NormalizedOSTempDirPath("bitrise")
- if err != nil {
- return "", err
- }
-
- // Write Gemfile to file and install
- if gemfileContent != "" {
- gemfilePth := path.Join(tmpDir, "Gemfile")
- if err := fileutil.WriteStringToFile(gemfilePth, gemfileContent); err != nil {
- return "", err
- }
-
- cmd := cmdex.NewCommand("bundle", "install")
-
- if inDir != "" {
- cmd.SetDir(inDir)
- }
-
- withEnvs = append(withEnvs, "BUNDLE_GEMFILE="+gemfilePth)
- cmd.SetEnvs(withEnvs)
-
- var outBuffer bytes.Buffer
- outWriter := bufio.NewWriter(&outBuffer)
- cmd.SetStdout(outWriter)
-
- var errBuffer bytes.Buffer
- errWriter := bufio.NewWriter(&errBuffer)
- cmd.SetStderr(errWriter)
-
- if err := cmd.Run(); err != nil {
- if errorutil.IsExitStatusError(err) {
- errMsg := ""
- if errBuffer.String() != "" {
- errMsg += fmt.Sprintf("error: %s\n", errBuffer.String())
- }
- if outBuffer.String() != "" {
- errMsg += fmt.Sprintf("output: %s", outBuffer.String())
- }
- if errMsg == "" {
- return "", err
- }
-
- return "", errors.New(errMsg)
- }
- return "", err
- }
- }
-
- // Write script to file and run
- rubyScriptPth := path.Join(tmpDir, "script.rb")
- if err := fileutil.WriteStringToFile(rubyScriptPth, scriptContent); err != nil {
- return "", err
- }
-
- var cmd *cmdex.CommandModel
-
- if gemfileContent != "" {
- cmd = cmdex.NewCommand("bundle", "exec", "ruby", rubyScriptPth)
- } else {
- cmd = cmdex.NewCommand("ruby", rubyScriptPth)
- }
-
- if inDir != "" {
- cmd.SetDir(inDir)
- }
-
- if len(withEnvs) > 0 {
- cmd.SetEnvs(withEnvs)
- }
-
- var outBuffer bytes.Buffer
- outWriter := bufio.NewWriter(&outBuffer)
- cmd.SetStdout(outWriter)
-
- var errBuffer bytes.Buffer
- errWriter := bufio.NewWriter(&errBuffer)
- cmd.SetStderr(errWriter)
-
- if err := cmd.Run(); err != nil {
- if errorutil.IsExitStatusError(err) {
- errMsg := ""
- if errBuffer.String() != "" {
- errMsg += fmt.Sprintf("error: %s\n", errBuffer.String())
- }
- if outBuffer.String() != "" {
- errMsg += fmt.Sprintf("output: %s", outBuffer.String())
- }
- if errMsg == "" {
- return "", err
- }
-
- return "", errors.New(errMsg)
- }
- return "", err
- }
-
- return outBuffer.String(), nil
-}
-
-func runRubyScript(scriptContent, gemfileContent, inDir string, withEnvs []string) error {
- _, err := runRubyScriptForOutput(scriptContent, gemfileContent, inDir, withEnvs)
- return err
-}
-
-// ReCreateProjectUserSchemes ....
-func ReCreateProjectUserSchemes(projectPth string) error {
- projectDir := filepath.Dir(projectPth)
-
- projectBase := filepath.Base(projectPth)
- envs := append(os.Environ(), "project_path="+projectBase, "LC_ALL=en_US.UTF-8")
-
- return runRubyScript(recreateUserSchemesRubyScriptContent, xcodeprojGemfileContent, projectDir, envs)
-}
-
-// ReCreateWorkspaceUserSchemes ...
-func ReCreateWorkspaceUserSchemes(workspacePth string) error {
- projects, err := WorkspaceProjectReferences(workspacePth)
- if err != nil {
- return err
- }
-
- for _, project := range projects {
- if err := ReCreateProjectUserSchemes(project); err != nil {
- return err
- }
- }
-
- return nil
-}
-
-// ProjectTargets ...
-func ProjectTargets(projectPth string) (map[string]bool, error) {
- pbxProjPth := filepath.Join(projectPth, "project.pbxproj")
- if exist, err := pathutil.IsPathExists(pbxProjPth); err != nil {
- return map[string]bool{}, err
- } else if !exist {
- return map[string]bool{}, fmt.Errorf("project.pbxproj does not exist at: %s", pbxProjPth)
- }
-
- content, err := fileutil.ReadStringFromFile(pbxProjPth)
- if err != nil {
- return map[string]bool{}, err
- }
-
- return pbxprojContentTartgets(content)
-
-}
-
-// WorkspaceTargets ...
-func WorkspaceTargets(workspacePth string) (map[string]bool, error) {
- projects, err := WorkspaceProjectReferences(workspacePth)
- if err != nil {
- return nil, err
- }
-
- targetMap := map[string]bool{}
- for _, project := range projects {
- projectTargetMap, err := ProjectTargets(project)
- if err != nil {
- return map[string]bool{}, err
- }
-
- for name, hasXCTest := range projectTargetMap {
- targetMap[name] = hasXCTest
- }
- }
-
- return targetMap, nil
-}
-
-// WorkspaceProjectReferences ...
-func WorkspaceProjectReferences(workspace string) ([]string, error) {
- projects := []string{}
-
- workspaceDir := filepath.Dir(workspace)
-
- xcworkspacedataPth := path.Join(workspace, "contents.xcworkspacedata")
- if exist, err := pathutil.IsPathExists(xcworkspacedataPth); err != nil {
- return []string{}, err
- } else if !exist {
- return []string{}, fmt.Errorf("contents.xcworkspacedata does not exist at: %s", xcworkspacedataPth)
- }
-
- xcworkspacedataStr, err := fileutil.ReadStringFromFile(xcworkspacedataPth)
- if err != nil {
- return []string{}, err
- }
-
- xcworkspacedataLines := strings.Split(xcworkspacedataStr, "\n")
- fileRefStart := false
- regexp := regexp.MustCompile(`location = "(.+):(.+).xcodeproj"`)
-
- for _, line := range xcworkspacedataLines {
- if strings.Contains(line, ".+)"`)
- testableReferenceEndPattern := ""
- isTestableReference := false
-
- xctestBuildableReferenceNameRegexp := regexp.MustCompile(`BuildableName = ".+.xctest"`)
-
- scanner := bufio.NewScanner(strings.NewReader(schemeFileContent))
- for scanner.Scan() {
- line := scanner.Text()
-
- if strings.TrimSpace(line) == testActionEndPattern {
- break
- }
-
- if strings.TrimSpace(line) == testActionStartPattern {
- isTestableAction = true
- continue
- }
-
- if !isTestableAction {
- continue
- }
-
- // TestAction
-
- if strings.TrimSpace(line) == testableReferenceEndPattern {
- isTestableReference = false
- continue
- }
-
- if strings.TrimSpace(line) == testableReferenceStartPattern {
- isTestableReference = true
- continue
- }
-
- if !isTestableReference {
- continue
- }
-
- // TestableReference
-
- if matches := testableReferenceSkippedRegexp.FindStringSubmatch(line); len(matches) > 1 {
- skipped := matches[1]
- if skipped != "NO" {
- break
- }
- }
-
- if match := xctestBuildableReferenceNameRegexp.FindString(line); match != "" {
- return true, nil
- }
- }
-
- if err := scanner.Err(); err != nil {
- return false, err
- }
-
- return false, nil
-}
-
-// PBXTargetDependency ...
-type PBXTargetDependency struct {
- id string
- isa string
- target string
-}
-
-func parsePBXTargetDependencies(pbxprojContent string) ([]PBXTargetDependency, error) {
- pbxTargetDependencies := []PBXTargetDependency{}
-
- id := ""
- isa := ""
- target := ""
-
- beginPBXTargetDependencySectionPattern := `/* Begin PBXTargetDependency section */`
- endPBXTargetDependencySectionPattern := `/* End PBXTargetDependency section */`
- isPBXTargetDependencySection := false
-
- // BAAFFEEF19EE788800F3AC91 /* PBXTargetDependency */ = {
- beginPBXTargetDependencyRegexp := regexp.MustCompile(`\s*(?P[A-Z0-9]+) /\* (?P.*) \*/ = {`)
- endPBXTargetDependencyPattern := `};`
- isPBXTargetDependency := false
-
- // isa = PBXTargetDependency;
- isaRegexp := regexp.MustCompile(`\s*isa = (?P.*);`)
- // target = BAAFFED019EE788800F3AC91 /* SampleAppWithCocoapods */;
- targetRegexp := regexp.MustCompile(`\s*target = (?P[A-Z0-9]+) /\* (?P.*) \*/;`)
-
- scanner := bufio.NewScanner(strings.NewReader(pbxprojContent))
- for scanner.Scan() {
- line := scanner.Text()
-
- if strings.TrimSpace(line) == endPBXTargetDependencySectionPattern {
- break
- }
-
- if strings.TrimSpace(line) == beginPBXTargetDependencySectionPattern {
- isPBXTargetDependencySection = true
- continue
- }
-
- if !isPBXTargetDependencySection {
- continue
- }
-
- // PBXTargetDependency section
-
- if strings.TrimSpace(line) == endPBXTargetDependencyPattern {
- pbxTargetDependency := PBXTargetDependency{
- id: id,
- isa: isa,
- target: target,
- }
- pbxTargetDependencies = append(pbxTargetDependencies, pbxTargetDependency)
-
- id = ""
- isa = ""
- target = ""
-
- isPBXTargetDependency = false
- continue
- }
-
- if matches := beginPBXTargetDependencyRegexp.FindStringSubmatch(line); len(matches) == 3 {
- id = matches[1]
- isa = matches[2]
-
- isPBXTargetDependency = true
- continue
- }
-
- if !isPBXTargetDependency {
- continue
- }
-
- // PBXTargetDependency item
-
- if matches := isaRegexp.FindStringSubmatch(line); len(matches) == 2 {
- isa = strings.Trim(matches[1], `"`)
- }
-
- if matches := targetRegexp.FindStringSubmatch(line); len(matches) == 3 {
- targetID := strings.Trim(matches[1], `"`)
- // targetName := strings.Trim(matches[2], `"`)
-
- target = targetID
- }
- }
-
- return pbxTargetDependencies, nil
-}
-
-// PBXNativeTarget ...
-type PBXNativeTarget struct {
- id string
- isa string
- dependencies []string
- name string
- productPath string
- productType string
-}
-
-func parsePBXNativeTargets(pbxprojContent string) ([]PBXNativeTarget, error) {
- pbxNativeTargets := []PBXNativeTarget{}
-
- id := ""
- isa := ""
- dependencies := []string{}
- name := ""
- productPath := ""
- productType := ""
-
- beginPBXNativeTargetSectionPattern := `/* Begin PBXNativeTarget section */`
- endPBXNativeTargetSectionPattern := `/* End PBXNativeTarget section */`
- isPBXNativeTargetSection := false
-
- // BAAFFED019EE788800F3AC91 /* SampleAppWithCocoapods */ = {
- beginPBXNativeTargetRegexp := regexp.MustCompile(`\s*(?P[A-Z0-9]+) /\* (?P.*) \*/ = {`)
- endPBXNativeTargetPattern := `};`
- isPBXNativeTarget := false
-
- // isa = PBXNativeTarget;
- isaRegexp := regexp.MustCompile(`\s*isa = (?P.*);`)
-
- beginDependenciesPattern := `dependencies = (`
- dependencieRegexp := regexp.MustCompile(`\s*(?P[A-Z0-9]+) /\* (?P.*) \*/,`)
- endDependenciesPattern := `);`
- isDependencies := false
-
- // name = SampleAppWithCocoapods;
- nameRegexp := regexp.MustCompile(`\s*name = (?P.*);`)
- // productReference = BAAFFEED19EE788800F3AC91 /* SampleAppWithCocoapodsTests.xctest */;
- productReferenceRegexp := regexp.MustCompile(`\s*productReference = (?P[A-Z0-9]+) /\* (?P.*) \*/;`)
- // productType = "com.apple.product-type.bundle.unit-test";
- productTypeRegexp := regexp.MustCompile(`\s*productType = (?P.*);`)
-
- scanner := bufio.NewScanner(strings.NewReader(pbxprojContent))
- for scanner.Scan() {
- line := scanner.Text()
-
- if strings.TrimSpace(line) == endPBXNativeTargetSectionPattern {
- break
- }
-
- if strings.TrimSpace(line) == beginPBXNativeTargetSectionPattern {
- isPBXNativeTargetSection = true
- continue
- }
-
- if !isPBXNativeTargetSection {
- continue
- }
-
- // PBXNativeTarget section
-
- if strings.TrimSpace(line) == endPBXNativeTargetPattern {
- pbxNativeTarget := PBXNativeTarget{
- id: id,
- isa: isa,
- dependencies: dependencies,
- name: name,
- productPath: productPath,
- productType: productType,
- }
- pbxNativeTargets = append(pbxNativeTargets, pbxNativeTarget)
-
- id = ""
- isa = ""
- name = ""
- productPath = ""
- productType = ""
- dependencies = []string{}
-
- isPBXNativeTarget = false
- continue
- }
-
- if matches := beginPBXNativeTargetRegexp.FindStringSubmatch(line); len(matches) == 3 {
- id = matches[1]
- name = matches[2]
-
- isPBXNativeTarget = true
- continue
- }
-
- if !isPBXNativeTarget {
- continue
- }
-
- // PBXNativeTarget item
-
- if matches := isaRegexp.FindStringSubmatch(line); len(matches) == 2 {
- isa = strings.Trim(matches[1], `"`)
- }
-
- if matches := nameRegexp.FindStringSubmatch(line); len(matches) == 2 {
- name = strings.Trim(matches[1], `"`)
- }
-
- if matches := productTypeRegexp.FindStringSubmatch(line); len(matches) == 2 {
- productType = strings.Trim(matches[1], `"`)
- }
-
- if matches := productReferenceRegexp.FindStringSubmatch(line); len(matches) == 3 {
- // productId := strings.Trim(matches[1], `"`)
- productPath = strings.Trim(matches[2], `"`)
- }
-
- if isDependencies && strings.TrimSpace(line) == endDependenciesPattern {
- isDependencies = false
- continue
- }
-
- if strings.TrimSpace(line) == beginDependenciesPattern {
- isDependencies = true
- continue
- }
-
- if !isDependencies {
- continue
- }
-
- // dependencies
- if matches := dependencieRegexp.FindStringSubmatch(line); len(matches) == 3 {
- dependencieID := strings.Trim(matches[1], `"`)
- dependencieIsa := strings.Trim(matches[2], `"`)
-
- if dependencieIsa == "PBXTargetDependency" {
- dependencies = append(dependencies, dependencieID)
- }
- }
- }
-
- if err := scanner.Err(); err != nil {
- return []PBXNativeTarget{}, err
- }
-
- return pbxNativeTargets, nil
-}
-
-func targetDependencieWithID(dependencies []PBXTargetDependency, id string) (PBXTargetDependency, bool) {
- for _, dependencie := range dependencies {
- if dependencie.id == id {
- return dependencie, true
- }
- }
- return PBXTargetDependency{}, false
-}
-
-func targetWithID(targets []PBXNativeTarget, id string) (PBXNativeTarget, bool) {
- for _, target := range targets {
- if target.id == id {
- return target, true
- }
- }
- return PBXNativeTarget{}, false
-}
-
-func pbxprojContentTartgets(pbxprojContent string) (map[string]bool, error) {
- targetMap := map[string]bool{}
-
- targets, err := parsePBXNativeTargets(pbxprojContent)
- if err != nil {
- return map[string]bool{}, err
- }
-
- targetDependencies, err := parsePBXTargetDependencies(pbxprojContent)
- if err != nil {
- return map[string]bool{}, err
- }
-
- // Add targets which has test targets
- for _, target := range targets {
- if path.Ext(target.productPath) == ".xctest" {
- if len(target.dependencies) > 0 {
- for _, dependencieID := range target.dependencies {
- dependency, found := targetDependencieWithID(targetDependencies, dependencieID)
- if found {
- dependentTarget, found := targetWithID(targets, dependency.target)
- if found {
- targetMap[dependentTarget.name] = true
- }
- }
- }
- }
- }
- }
-
- // Add targets which has NO test targets
- for _, target := range targets {
- if path.Ext(target.productPath) != ".xctest" {
- _, found := targetMap[target.name]
- if !found {
- targetMap[target.name] = false
- }
- }
- }
-
- return targetMap, nil
-}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/xcodeproj_test_file_contents.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/xcodeproj_test_file_contents.go
deleted file mode 100644
index 2adfd13e..00000000
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-io/xcode-utils/xcodeproj/xcodeproj_test_file_contents.go
+++ /dev/null
@@ -1,349 +0,0 @@
-package xcodeproj
-
-const (
- schemeContentWithXCTestBuildAction = `
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-`
-
- schemeContentWithoutXCTestBuildAction = `
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-`
-
- pbxNativeTargetSectionWithSpace = `/* Begin PBXTargetDependency section */
- BADDFA051A703F87004C3526 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- target = BADDF9E61A703F87004C3526 /* BitriseSampleAppsiOS With Spaces */;
- targetProxy = BADDFA041A703F87004C3526 /* PBXContainerItemProxy */;
- };
-/* End PBXTargetDependency section */
-
-/* Begin PBXNativeTarget section */
- BADDF9E61A703F87004C3526 /* BitriseSampleAppsiOS With Spaces */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = BADDFA0D1A703F87004C3526 /* Build configuration list for PBXNativeTarget "BitriseSampleAppsiOS With Spaces" */;
- buildPhases = (
- BADDF9E31A703F87004C3526 /* Sources */,
- BADDF9E41A703F87004C3526 /* Frameworks */,
- BADDF9E51A703F87004C3526 /* Resources */,
- );
- buildRules = (
- );
- dependencies = (
- );
- name = "BitriseSampleAppsiOS With Spaces";
- productName = "BitriseSampleAppsiOS With Spaces";
- productReference = BADDF9E71A703F87004C3526 /* BitriseSampleAppsiOS With Spaces.app */;
- productType = "com.apple.product-type.application";
- };
- BADDFA021A703F87004C3526 /* BitriseSampleAppsiOS With SpacesTests */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = BADDFA101A703F87004C3526 /* Build configuration list for PBXNativeTarget "BitriseSampleAppsiOS With SpacesTests" */;
- buildPhases = (
- BADDF9FF1A703F87004C3526 /* Sources */,
- BADDFA001A703F87004C3526 /* Frameworks */,
- BADDFA011A703F87004C3526 /* Resources */,
- );
- buildRules = (
- );
- dependencies = (
- BADDFA051A703F87004C3526 /* PBXTargetDependency */,
- );
- name = "BitriseSampleAppsiOS With SpacesTests";
- productName = "BitriseSampleAppsiOS With SpacesTests";
- productReference = BADDFA031A703F87004C3526 /* BitriseSampleAppsiOS With SpacesTests.xctest */;
- productType = "com.apple.product-type.bundle.unit-test";
- };
-/* End PBXNativeTarget section */
-`
-
- pbxProjContentChunk = `// !$*UTF8*$!
-{
- archiveVersion = 1;
- classes = {
- };
- objectVersion = 46;
- objects = {
-
-/* Begin PBXTargetDependency section */
- BAAFFEEF19EE788800F3AC91 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- target = BAAFFED019EE788800F3AC91 /* SampleAppWithCocoapods */;
- targetProxy = BAAFFEEE19EE788800F3AC91 /* PBXContainerItemProxy */;
- };
-/* End PBXTargetDependency section */
-
-/* Begin PBXNativeTarget section */
- BAAFFED019EE788800F3AC91 /* SampleAppWithCocoapods */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = BAAFFEF719EE788800F3AC91 /* Build configuration list for PBXNativeTarget "SampleAppWithCocoapods" */;
- buildPhases = (
- BAAFFECD19EE788800F3AC91 /* Sources */,
- BAAFFECE19EE788800F3AC91 /* Frameworks */,
- BAAFFECF19EE788800F3AC91 /* Resources */,
- );
- buildRules = (
- );
- dependencies = (
- );
- name = SampleAppWithCocoapods;
- productName = SampleAppWithCocoapods;
- productReference = BAAFFED119EE788800F3AC91 /* SampleAppWithCocoapods.app */;
- productType = "com.apple.product-type.application";
- };
- BAAFFEEC19EE788800F3AC91 /* SampleAppWithCocoapodsTests */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = BAAFFEFA19EE788800F3AC91 /* Build configuration list for PBXNativeTarget "SampleAppWithCocoapodsTests" */;
- buildPhases = (
- 75ACE584234D974D15C5CAE9 /* Check Pods Manifest.lock */,
- BAAFFEE919EE788800F3AC91 /* Sources */,
- BAAFFEEA19EE788800F3AC91 /* Frameworks */,
- BAAFFEEB19EE788800F3AC91 /* Resources */,
- D0F06DBF2FED4262AA6DE7DB /* Copy Pods Resources */,
- );
- buildRules = (
- );
- dependencies = (
- BAAFFEEF19EE788800F3AC91 /* PBXTargetDependency */,
- );
- name = SampleAppWithCocoapodsTests;
- productName = SampleAppWithCocoapodsTests;
- productReference = BAAFFEED19EE788800F3AC91 /* SampleAppWithCocoapodsTests.xctest */;
- productType = "com.apple.product-type.bundle.unit-test";
- };
-/* End PBXNativeTarget section */
-
-/* Begin PBXVariantGroup section */
- BAAFFEE119EE788800F3AC91 /* Main.storyboard */ = {
- isa = PBXVariantGroup;
- children = (
- BAAFFEE219EE788800F3AC91 /* Base */,
- );
- name = Main.storyboard;
- sourceTree = "";
- };
- BAAFFEE619EE788800F3AC91 /* LaunchScreen.xib */ = {
- isa = PBXVariantGroup;
- children = (
- BAAFFEE719EE788800F3AC91 /* Base */,
- );
- name = LaunchScreen.xib;
- sourceTree = "";
- };
-/* End PBXVariantGroup section */
-
- rootObject = BAAFFEC919EE788800F3AC91 /* Project object */;
-}
-`
-
- pbxTargetDependencies = `
- /* End PBXSourcesBuildPhase section */
-
-/* Begin PBXTargetDependency section */
- BAAFFEEF19EE788800F3AC91 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- target = BAAFFED019EE788800F3AC91 /* SampleAppWithCocoapods */;
- targetProxy = BAAFFEEE19EE788800F3AC91 /* PBXContainerItemProxy */;
- };
-/* End PBXTargetDependency section */
-
-/* Begin PBXVariantGroup section */
-`
-)
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/project.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/project.go
new file mode 100644
index 00000000..e9fc0554
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/project.go
@@ -0,0 +1,70 @@
+package xcodeproj
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/bitrise-io/go-utils/pathutil"
+)
+
+// ProjectModel ...
+type ProjectModel struct {
+ Pth string
+ Name string
+ SDKs []string
+ SharedSchemes []SchemeModel
+ Targets []TargetModel
+}
+
+// NewProject ...
+func NewProject(xcodeprojPth string) (ProjectModel, error) {
+ project := ProjectModel{
+ Pth: xcodeprojPth,
+ Name: strings.TrimSuffix(filepath.Base(xcodeprojPth), filepath.Ext(xcodeprojPth)),
+ }
+
+ // SDK
+ pbxprojPth := filepath.Join(xcodeprojPth, "project.pbxproj")
+
+ if exist, err := pathutil.IsPathExists(pbxprojPth); err != nil {
+ return ProjectModel{}, err
+ } else if !exist {
+ return ProjectModel{}, fmt.Errorf("Project descriptor not found at: %s", pbxprojPth)
+ }
+
+ sdks, err := GetBuildConfigSDKs(pbxprojPth)
+ if err != nil {
+ return ProjectModel{}, err
+ }
+
+ project.SDKs = sdks
+
+ // Shared Schemes
+ schemes, err := ProjectSharedSchemes(xcodeprojPth)
+ if err != nil {
+ return ProjectModel{}, err
+ }
+
+ project.SharedSchemes = schemes
+
+ // Targets
+ targets, err := ProjectTargets(xcodeprojPth)
+ if err != nil {
+ return ProjectModel{}, err
+ }
+
+ project.Targets = targets
+
+ return project, nil
+}
+
+// ContainsSDK ...
+func (p ProjectModel) ContainsSDK(sdk string) bool {
+ for _, s := range p.SDKs {
+ if s == sdk {
+ return true
+ }
+ }
+ return false
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/workspace.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/workspace.go
new file mode 100644
index 00000000..1d9d03be
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/workspace.go
@@ -0,0 +1,78 @@
+package xcodeproj
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/bitrise-io/go-utils/pathutil"
+)
+
+// WorkspaceModel ...
+type WorkspaceModel struct {
+ Pth string
+ Name string
+ Projects []ProjectModel
+ IsPodWorkspace bool
+}
+
+// NewWorkspace ...
+func NewWorkspace(xcworkspacePth string, projectsToCheck ...string) (WorkspaceModel, error) {
+ workspace := WorkspaceModel{
+ Pth: xcworkspacePth,
+ Name: strings.TrimSuffix(filepath.Base(xcworkspacePth), filepath.Ext(xcworkspacePth)),
+ }
+
+ projects, err := WorkspaceProjectReferences(xcworkspacePth)
+ if err != nil {
+ return WorkspaceModel{}, err
+ }
+
+ if len(projectsToCheck) > 0 {
+ filteredProjects := []string{}
+ for _, project := range projects {
+ for _, projectToCheck := range projectsToCheck {
+ if project == projectToCheck {
+ filteredProjects = append(filteredProjects, project)
+ }
+ }
+ }
+ projects = filteredProjects
+ }
+
+ for _, xcodeprojPth := range projects {
+
+ if exist, err := pathutil.IsPathExists(xcodeprojPth); err != nil {
+ return WorkspaceModel{}, err
+ } else if !exist {
+ return WorkspaceModel{}, fmt.Errorf("referred project (%s) not found", xcodeprojPth)
+ }
+
+ project, err := NewProject(xcodeprojPth)
+ if err != nil {
+ return WorkspaceModel{}, err
+ }
+
+ workspace.Projects = append(workspace.Projects, project)
+ }
+
+ return workspace, nil
+}
+
+// GetSharedSchemes ...
+func (w WorkspaceModel) GetSharedSchemes() []SchemeModel {
+ schemes := []SchemeModel{}
+ for _, project := range w.Projects {
+ schemes = append(schemes, project.SharedSchemes...)
+ }
+ return schemes
+}
+
+// GetTargets ...
+func (w WorkspaceModel) GetTargets() []TargetModel {
+ targets := []TargetModel{}
+ for _, project := range w.Projects {
+ targets = append(targets, project.Targets...)
+ }
+ return targets
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcodeproj.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcodeproj.go
new file mode 100644
index 00000000..c1b28ca6
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcodeproj.go
@@ -0,0 +1,84 @@
+package xcodeproj
+
+import (
+ "bufio"
+ "regexp"
+ "strings"
+
+ "github.com/bitrise-io/go-utils/fileutil"
+)
+
+// Extensions
+const (
+ // XCWorkspaceExt ...
+ XCWorkspaceExt = ".xcworkspace"
+ // XCodeProjExt ...
+ XCodeProjExt = ".xcodeproj"
+ // XCSchemeExt ...
+ XCSchemeExt = ".xcscheme"
+)
+
+// IsXCodeProj ...
+func IsXCodeProj(pth string) bool {
+ return strings.HasSuffix(pth, XCodeProjExt)
+}
+
+// IsXCWorkspace ...
+func IsXCWorkspace(pth string) bool {
+ return strings.HasSuffix(pth, XCWorkspaceExt)
+}
+
+// GetBuildConfigSDKs ...
+func GetBuildConfigSDKs(pbxprojPth string) ([]string, error) {
+ content, err := fileutil.ReadStringFromFile(pbxprojPth)
+ if err != nil {
+ return []string{}, err
+ }
+
+ return getBuildConfigSDKsFromContent(content)
+}
+
+func getBuildConfigSDKsFromContent(pbxprojContent string) ([]string, error) {
+ sdkMap := map[string]bool{}
+
+ beginXCBuildConfigurationSection := `/* Begin XCBuildConfiguration section */`
+ endXCBuildConfigurationSection := `/* End XCBuildConfiguration section */`
+ isXCBuildConfigurationSection := false
+
+ // SDKROOT = macosx;
+ pattern := `SDKROOT = (?P.*);`
+ regexp := regexp.MustCompile(pattern)
+
+ scanner := bufio.NewScanner(strings.NewReader(pbxprojContent))
+ for scanner.Scan() {
+ line := scanner.Text()
+
+ if strings.TrimSpace(line) == endXCBuildConfigurationSection {
+ break
+ }
+
+ if strings.TrimSpace(line) == beginXCBuildConfigurationSection {
+ isXCBuildConfigurationSection = true
+ continue
+ }
+
+ if !isXCBuildConfigurationSection {
+ continue
+ }
+
+ if match := regexp.FindStringSubmatch(line); len(match) == 2 {
+ sdk := match[1]
+ sdkMap[sdk] = true
+ }
+ }
+ if err := scanner.Err(); err != nil {
+ return []string{}, err
+ }
+
+ sdks := []string{}
+ for sdk := range sdkMap {
+ sdks = append(sdks, sdk)
+ }
+
+ return sdks, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcodeproj_test_files.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcodeproj_test_files.go
new file mode 100644
index 00000000..7fd70984
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcodeproj_test_files.go
@@ -0,0 +1,1326 @@
+package xcodeproj
+
+import (
+ "path/filepath"
+
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+)
+
+func testIOSPbxprojPth() (string, error) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__bitrise_init__")
+ if err != nil {
+ return "", err
+ }
+
+ pth := filepath.Join(tmpDir, "project.pbxproj")
+
+ if err := fileutil.WriteStringToFile(pth, testIOSPbxprojContent); err != nil {
+ return "", err
+ }
+
+ return pth, nil
+}
+
+func testMacOSPbxprojPth() (string, error) {
+ tmpDir, err := pathutil.NormalizedOSTempDirPath("__bitrise_init__")
+ if err != nil {
+ return "", err
+ }
+
+ pth := filepath.Join(tmpDir, "project.pbxproj")
+
+ if err := fileutil.WriteStringToFile(pth, testMacOSPbxprojContent); err != nil {
+ return "", err
+ }
+
+ return pth, nil
+}
+
+const testMacOSPbxprojContent = `// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1302F7441D95BA4A005CE678 /* Session.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1302F7431D95BA4A005CE678 /* Session.swift */; };
+ 130E6BBE1D95BBB4009D3C78 /* Command.swift in Sources */ = {isa = PBXBuildFile; fileRef = 138F9EE61D8E7ABC00515FCA /* Command.swift */; };
+ 131A3D9A1D90543F002DAF99 /* Realm.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 131A3D961D9053BB002DAF99 /* Realm.framework */; };
+ 131A3D9B1D90543F002DAF99 /* Realm.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 131A3D961D9053BB002DAF99 /* Realm.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
+ 131A3D9C1D90543F002DAF99 /* RealmSwift.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 131A3D971D9053BB002DAF99 /* RealmSwift.framework */; };
+ 131A3D9D1D90543F002DAF99 /* RealmSwift.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 131A3D971D9053BB002DAF99 /* RealmSwift.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
+ 131A3DA61D9060AA002DAF99 /* Yaml.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 131A3DA11D90609E002DAF99 /* Yaml.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
+ 131A3DA71D9060C0002DAF99 /* Yaml.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 131A3DA11D90609E002DAF99 /* Yaml.framework */; };
+ 131A3DAB1D906BFA002DAF99 /* BitriseTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 131A3DAA1D906BFA002DAF99 /* BitriseTool.swift */; };
+ 131A3DAD1D906D54002DAF99 /* Bitrise.swift in Sources */ = {isa = PBXBuildFile; fileRef = 131A3DAC1D906D54002DAF99 /* Bitrise.swift */; };
+ 131A3DAF1D906DAB002DAF99 /* Envman.swift in Sources */ = {isa = PBXBuildFile; fileRef = 131A3DAE1D906DAB002DAF99 /* Envman.swift */; };
+ 131A3DB11D906DBF002DAF99 /* Stepman.swift in Sources */ = {isa = PBXBuildFile; fileRef = 131A3DB01D906DBF002DAF99 /* Stepman.swift */; };
+ 131ACE731D93054B007E71E9 /* ToolManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 131ACE721D93054B007E71E9 /* ToolManager.swift */; };
+ 131ACE761D9323E3007E71E9 /* Version.swift in Sources */ = {isa = PBXBuildFile; fileRef = 131ACE751D9323E3007E71E9 /* Version.swift */; };
+ 13A95E821D966F040061B54F /* BashSessionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13A95E811D966F040061B54F /* BashSessionViewController.swift */; };
+ 13AAE1BD1D8426FF00AEE66D /* FileManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13AAE1BC1D8426FF00AEE66D /* FileManager.swift */; };
+ 13B0B8F31D872E93006EA29C /* RealmManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13B0B8F21D872E93006EA29C /* RealmManager.swift */; };
+ 13B958CD1D89E87600D3310D /* SystemInfoViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13B958CC1D89E87600D3310D /* SystemInfoViewController.swift */; };
+ 13B958D01D89EC7800D3310D /* RunViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13B958CE1D89EC7800D3310D /* RunViewController.swift */; };
+ 13C989811D8319600028BA2C /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C989801D8319600028BA2C /* AppDelegate.swift */; };
+ 13C989851D8319600028BA2C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13C989841D8319600028BA2C /* Assets.xcassets */; };
+ 13C989881D8319600028BA2C /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 13C989861D8319600028BA2C /* Main.storyboard */; };
+ 13C989931D8319600028BA2C /* BitriseStudioTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C989921D8319600028BA2C /* BitriseStudioTests.swift */; };
+ 13C9899E1D8319600028BA2C /* BitriseStudioUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C9899D1D8319600028BA2C /* BitriseStudioUITests.swift */; };
+ 13CA73D61D84A74800B1A323 /* AddProjectViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13CA73D51D84A74800B1A323 /* AddProjectViewController.swift */; };
+ 13CA73DE1D84B67200B1A323 /* Project.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13CA73DD1D84B67200B1A323 /* Project.swift */; };
+ 13E3F5531D83477300AE7C20 /* ProjectsViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C989821D8319600028BA2C /* ProjectsViewController.swift */; };
+ 13FF5FCE1D9859EE008C7DFB /* Log.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13FF5FCD1D9859EE008C7DFB /* Log.swift */; };
+ 13FF5FD11D98620A008C7DFB /* String+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13FF5FD01D98620A008C7DFB /* String+Extensions.swift */; };
+ 13FF5FD31D9862DA008C7DFB /* Data+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13FF5FD21D9862DA008C7DFB /* Data+Extensions.swift */; };
+ 13FF5FD51D9872EC008C7DFB /* Pipe+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13FF5FD41D9872EC008C7DFB /* Pipe+Extensions.swift */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXContainerItemProxy section */
+ 13C9898F1D8319600028BA2C /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 13C989751D83195F0028BA2C /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 13C9897C1D83195F0028BA2C;
+ remoteInfo = BitriseStudio;
+ };
+ 13C9899A1D8319600028BA2C /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 13C989751D83195F0028BA2C /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 13C9897C1D83195F0028BA2C;
+ remoteInfo = BitriseStudio;
+ };
+/* End PBXContainerItemProxy section */
+
+/* Begin PBXCopyFilesBuildPhase section */
+ 131A3D9E1D90543F002DAF99 /* Embed Frameworks */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 2147483647;
+ dstPath = "";
+ dstSubfolderSpec = 10;
+ files = (
+ 131A3D9D1D90543F002DAF99 /* RealmSwift.framework in Embed Frameworks */,
+ 131A3D9B1D90543F002DAF99 /* Realm.framework in Embed Frameworks */,
+ 131A3DA61D9060AA002DAF99 /* Yaml.framework in Embed Frameworks */,
+ );
+ name = "Embed Frameworks";
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXCopyFilesBuildPhase section */
+
+/* Begin PBXFileReference section */
+ 1302F7431D95BA4A005CE678 /* Session.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Session.swift; sourceTree = ""; };
+ 131A3D961D9053BB002DAF99 /* Realm.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Realm.framework; path = Framworks/Realm.framework; sourceTree = ""; };
+ 131A3D971D9053BB002DAF99 /* RealmSwift.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = RealmSwift.framework; path = Framworks/RealmSwift.framework; sourceTree = ""; };
+ 131A3DA11D90609E002DAF99 /* Yaml.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Yaml.framework; path = Framworks/Yaml.framework; sourceTree = ""; };
+ 131A3DAA1D906BFA002DAF99 /* BitriseTool.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BitriseTool.swift; sourceTree = ""; };
+ 131A3DAC1D906D54002DAF99 /* Bitrise.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Bitrise.swift; sourceTree = ""; };
+ 131A3DAE1D906DAB002DAF99 /* Envman.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Envman.swift; sourceTree = ""; };
+ 131A3DB01D906DBF002DAF99 /* Stepman.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Stepman.swift; sourceTree = ""; };
+ 131ACE721D93054B007E71E9 /* ToolManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ToolManager.swift; sourceTree = ""; };
+ 131ACE751D9323E3007E71E9 /* Version.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Version.swift; sourceTree = ""; };
+ 138F9EE61D8E7ABC00515FCA /* Command.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Command.swift; sourceTree = ""; };
+ 13A95E811D966F040061B54F /* BashSessionViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BashSessionViewController.swift; sourceTree = ""; };
+ 13AAE1BC1D8426FF00AEE66D /* FileManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FileManager.swift; sourceTree = ""; };
+ 13B0B8F21D872E93006EA29C /* RealmManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RealmManager.swift; sourceTree = ""; };
+ 13B958CC1D89E87600D3310D /* SystemInfoViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SystemInfoViewController.swift; sourceTree = ""; };
+ 13B958CE1D89EC7800D3310D /* RunViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RunViewController.swift; sourceTree = ""; };
+ 13C9897D1D8319600028BA2C /* BitriseStudio.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = BitriseStudio.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 13C989801D8319600028BA2C /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; };
+ 13C989821D8319600028BA2C /* ProjectsViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProjectsViewController.swift; sourceTree = ""; };
+ 13C989841D8319600028BA2C /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
+ 13C989871D8319600028BA2C /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
+ 13C989891D8319600028BA2C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
+ 13C9898E1D8319600028BA2C /* BitriseStudioTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = BitriseStudioTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
+ 13C989921D8319600028BA2C /* BitriseStudioTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BitriseStudioTests.swift; sourceTree = ""; };
+ 13C989941D8319600028BA2C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
+ 13C989991D8319600028BA2C /* BitriseStudioUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = BitriseStudioUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
+ 13C9899D1D8319600028BA2C /* BitriseStudioUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BitriseStudioUITests.swift; sourceTree = ""; };
+ 13C9899F1D8319600028BA2C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
+ 13CA73D51D84A74800B1A323 /* AddProjectViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AddProjectViewController.swift; sourceTree = ""; };
+ 13CA73DD1D84B67200B1A323 /* Project.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Project.swift; sourceTree = ""; };
+ 13FF5FCD1D9859EE008C7DFB /* Log.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Log.swift; sourceTree = ""; };
+ 13FF5FD01D98620A008C7DFB /* String+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "String+Extensions.swift"; sourceTree = ""; };
+ 13FF5FD21D9862DA008C7DFB /* Data+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Data+Extensions.swift"; sourceTree = ""; };
+ 13FF5FD41D9872EC008C7DFB /* Pipe+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Pipe+Extensions.swift"; sourceTree = ""; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 13C9897A1D83195F0028BA2C /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 131A3D9C1D90543F002DAF99 /* RealmSwift.framework in Frameworks */,
+ 131A3D9A1D90543F002DAF99 /* Realm.framework in Frameworks */,
+ 131A3DA71D9060C0002DAF99 /* Yaml.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C9898B1D8319600028BA2C /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C989961D8319600028BA2C /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 1302F7421D95BA35005CE678 /* Bash */ = {
+ isa = PBXGroup;
+ children = (
+ 1302F7431D95BA4A005CE678 /* Session.swift */,
+ 138F9EE61D8E7ABC00515FCA /* Command.swift */,
+ );
+ name = Bash;
+ sourceTree = "";
+ };
+ 131ACE741D9323C1007E71E9 /* Version */ = {
+ isa = PBXGroup;
+ children = (
+ 131ACE751D9323E3007E71E9 /* Version.swift */,
+ );
+ name = Version;
+ sourceTree = "";
+ };
+ 13C989741D83195F0028BA2C = {
+ isa = PBXGroup;
+ children = (
+ 13C9897F1D8319600028BA2C /* BitriseStudio */,
+ 13C989911D8319600028BA2C /* BitriseStudioTests */,
+ 13C9899C1D8319600028BA2C /* BitriseStudioUITests */,
+ 13C9897E1D8319600028BA2C /* Products */,
+ 13CA73D71D84B5C500B1A323 /* Frameworks */,
+ );
+ sourceTree = "";
+ };
+ 13C9897E1D8319600028BA2C /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 13C9897D1D8319600028BA2C /* BitriseStudio.app */,
+ 13C9898E1D8319600028BA2C /* BitriseStudioTests.xctest */,
+ 13C989991D8319600028BA2C /* BitriseStudioUITests.xctest */,
+ );
+ name = Products;
+ sourceTree = "";
+ };
+ 13C9897F1D8319600028BA2C /* BitriseStudio */ = {
+ isa = PBXGroup;
+ children = (
+ 13FF5FCF1D9861E3008C7DFB /* Extensions */,
+ 13FF5FCC1D9859DB008C7DFB /* Log */,
+ 1302F7421D95BA35005CE678 /* Bash */,
+ 131ACE741D9323C1007E71E9 /* Version */,
+ 13CA73DC1D84B63E00B1A323 /* Models */,
+ 13E3F5501D8342DD00AE7C20 /* Managers */,
+ 13E3F54D1D8341DF00AE7C20 /* Controllers */,
+ 13E3F54E1D83429900AE7C20 /* Supporting Files */,
+ 13E3F54F1D8342BC00AE7C20 /* Assets */,
+ 13C989801D8319600028BA2C /* AppDelegate.swift */,
+ );
+ path = BitriseStudio;
+ sourceTree = "";
+ };
+ 13C989911D8319600028BA2C /* BitriseStudioTests */ = {
+ isa = PBXGroup;
+ children = (
+ 13C989921D8319600028BA2C /* BitriseStudioTests.swift */,
+ 13C989941D8319600028BA2C /* Info.plist */,
+ );
+ path = BitriseStudioTests;
+ sourceTree = "";
+ };
+ 13C9899C1D8319600028BA2C /* BitriseStudioUITests */ = {
+ isa = PBXGroup;
+ children = (
+ 13C9899D1D8319600028BA2C /* BitriseStudioUITests.swift */,
+ 13C9899F1D8319600028BA2C /* Info.plist */,
+ );
+ path = BitriseStudioUITests;
+ sourceTree = "";
+ };
+ 13CA73D71D84B5C500B1A323 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 131A3DA11D90609E002DAF99 /* Yaml.framework */,
+ 131A3D961D9053BB002DAF99 /* Realm.framework */,
+ 131A3D971D9053BB002DAF99 /* RealmSwift.framework */,
+ );
+ name = Frameworks;
+ sourceTree = "";
+ };
+ 13CA73DC1D84B63E00B1A323 /* Models */ = {
+ isa = PBXGroup;
+ children = (
+ 13CA73DD1D84B67200B1A323 /* Project.swift */,
+ 131A3DAA1D906BFA002DAF99 /* BitriseTool.swift */,
+ 131A3DAC1D906D54002DAF99 /* Bitrise.swift */,
+ 131A3DAE1D906DAB002DAF99 /* Envman.swift */,
+ 131A3DB01D906DBF002DAF99 /* Stepman.swift */,
+ );
+ name = Models;
+ sourceTree = "";
+ };
+ 13E3F54D1D8341DF00AE7C20 /* Controllers */ = {
+ isa = PBXGroup;
+ children = (
+ 13C989861D8319600028BA2C /* Main.storyboard */,
+ 13C989821D8319600028BA2C /* ProjectsViewController.swift */,
+ 13CA73D51D84A74800B1A323 /* AddProjectViewController.swift */,
+ 13B958CC1D89E87600D3310D /* SystemInfoViewController.swift */,
+ 13B958CE1D89EC7800D3310D /* RunViewController.swift */,
+ 13A95E811D966F040061B54F /* BashSessionViewController.swift */,
+ );
+ name = Controllers;
+ sourceTree = "";
+ };
+ 13E3F54E1D83429900AE7C20 /* Supporting Files */ = {
+ isa = PBXGroup;
+ children = (
+ 13C989891D8319600028BA2C /* Info.plist */,
+ );
+ name = "Supporting Files";
+ sourceTree = "";
+ };
+ 13E3F54F1D8342BC00AE7C20 /* Assets */ = {
+ isa = PBXGroup;
+ children = (
+ 13C989841D8319600028BA2C /* Assets.xcassets */,
+ );
+ name = Assets;
+ sourceTree = "";
+ };
+ 13E3F5501D8342DD00AE7C20 /* Managers */ = {
+ isa = PBXGroup;
+ children = (
+ 13AAE1BC1D8426FF00AEE66D /* FileManager.swift */,
+ 13B0B8F21D872E93006EA29C /* RealmManager.swift */,
+ 131ACE721D93054B007E71E9 /* ToolManager.swift */,
+ );
+ name = Managers;
+ sourceTree = "";
+ };
+ 13FF5FCC1D9859DB008C7DFB /* Log */ = {
+ isa = PBXGroup;
+ children = (
+ 13FF5FCD1D9859EE008C7DFB /* Log.swift */,
+ );
+ name = Log;
+ sourceTree = "";
+ };
+ 13FF5FCF1D9861E3008C7DFB /* Extensions */ = {
+ isa = PBXGroup;
+ children = (
+ 13FF5FD01D98620A008C7DFB /* String+Extensions.swift */,
+ 13FF5FD21D9862DA008C7DFB /* Data+Extensions.swift */,
+ 13FF5FD41D9872EC008C7DFB /* Pipe+Extensions.swift */,
+ );
+ name = Extensions;
+ sourceTree = "";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 13C9897C1D83195F0028BA2C /* BitriseStudio */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 13C989A21D8319600028BA2C /* Build configuration list for PBXNativeTarget "BitriseStudio" */;
+ buildPhases = (
+ 13C989791D83195F0028BA2C /* Sources */,
+ 13C9897A1D83195F0028BA2C /* Frameworks */,
+ 13C9897B1D83195F0028BA2C /* Resources */,
+ 131A3D9E1D90543F002DAF99 /* Embed Frameworks */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = BitriseStudio;
+ productName = BitriseStudio;
+ productReference = 13C9897D1D8319600028BA2C /* BitriseStudio.app */;
+ productType = "com.apple.product-type.application";
+ };
+ 13C9898D1D8319600028BA2C /* BitriseStudioTests */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 13C989A51D8319600028BA2C /* Build configuration list for PBXNativeTarget "BitriseStudioTests" */;
+ buildPhases = (
+ 13C9898A1D8319600028BA2C /* Sources */,
+ 13C9898B1D8319600028BA2C /* Frameworks */,
+ 13C9898C1D8319600028BA2C /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ 13C989901D8319600028BA2C /* PBXTargetDependency */,
+ );
+ name = BitriseStudioTests;
+ productName = BitriseStudioTests;
+ productReference = 13C9898E1D8319600028BA2C /* BitriseStudioTests.xctest */;
+ productType = "com.apple.product-type.bundle.unit-test";
+ };
+ 13C989981D8319600028BA2C /* BitriseStudioUITests */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 13C989A81D8319600028BA2C /* Build configuration list for PBXNativeTarget "BitriseStudioUITests" */;
+ buildPhases = (
+ 13C989951D8319600028BA2C /* Sources */,
+ 13C989961D8319600028BA2C /* Frameworks */,
+ 13C989971D8319600028BA2C /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ 13C9899B1D8319600028BA2C /* PBXTargetDependency */,
+ );
+ name = BitriseStudioUITests;
+ productName = BitriseStudioUITests;
+ productReference = 13C989991D8319600028BA2C /* BitriseStudioUITests.xctest */;
+ productType = "com.apple.product-type.bundle.ui-testing";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 13C989751D83195F0028BA2C /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastSwiftUpdateCheck = 0800;
+ LastUpgradeCheck = 0810;
+ ORGANIZATIONNAME = "Krisztian Goedrei";
+ TargetAttributes = {
+ 13C9897C1D83195F0028BA2C = {
+ CreatedOnToolsVersion = 8.0;
+ DevelopmentTeam = 9NS44DLTN7;
+ ProvisioningStyle = Manual;
+ };
+ 13C9898D1D8319600028BA2C = {
+ CreatedOnToolsVersion = 8.0;
+ DevelopmentTeam = L935L4GU3F;
+ ProvisioningStyle = Automatic;
+ TestTargetID = 13C9897C1D83195F0028BA2C;
+ };
+ 13C989981D8319600028BA2C = {
+ CreatedOnToolsVersion = 8.0;
+ DevelopmentTeam = L935L4GU3F;
+ ProvisioningStyle = Automatic;
+ TestTargetID = 13C9897C1D83195F0028BA2C;
+ };
+ };
+ };
+ buildConfigurationList = 13C989781D83195F0028BA2C /* Build configuration list for PBXProject "BitriseStudio" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 13C989741D83195F0028BA2C;
+ productRefGroup = 13C9897E1D8319600028BA2C /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 13C9897C1D83195F0028BA2C /* BitriseStudio */,
+ 13C9898D1D8319600028BA2C /* BitriseStudioTests */,
+ 13C989981D8319600028BA2C /* BitriseStudioUITests */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 13C9897B1D83195F0028BA2C /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C989851D8319600028BA2C /* Assets.xcassets in Resources */,
+ 13C989881D8319600028BA2C /* Main.storyboard in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C9898C1D8319600028BA2C /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C989971D8319600028BA2C /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 13C989791D83195F0028BA2C /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13FF5FD11D98620A008C7DFB /* String+Extensions.swift in Sources */,
+ 131A3DB11D906DBF002DAF99 /* Stepman.swift in Sources */,
+ 13B0B8F31D872E93006EA29C /* RealmManager.swift in Sources */,
+ 13FF5FCE1D9859EE008C7DFB /* Log.swift in Sources */,
+ 131A3DAB1D906BFA002DAF99 /* BitriseTool.swift in Sources */,
+ 13E3F5531D83477300AE7C20 /* ProjectsViewController.swift in Sources */,
+ 130E6BBE1D95BBB4009D3C78 /* Command.swift in Sources */,
+ 13FF5FD51D9872EC008C7DFB /* Pipe+Extensions.swift in Sources */,
+ 131ACE731D93054B007E71E9 /* ToolManager.swift in Sources */,
+ 13A95E821D966F040061B54F /* BashSessionViewController.swift in Sources */,
+ 13C989811D8319600028BA2C /* AppDelegate.swift in Sources */,
+ 1302F7441D95BA4A005CE678 /* Session.swift in Sources */,
+ 131ACE761D9323E3007E71E9 /* Version.swift in Sources */,
+ 13B958CD1D89E87600D3310D /* SystemInfoViewController.swift in Sources */,
+ 13CA73D61D84A74800B1A323 /* AddProjectViewController.swift in Sources */,
+ 131A3DAD1D906D54002DAF99 /* Bitrise.swift in Sources */,
+ 13FF5FD31D9862DA008C7DFB /* Data+Extensions.swift in Sources */,
+ 13AAE1BD1D8426FF00AEE66D /* FileManager.swift in Sources */,
+ 13B958D01D89EC7800D3310D /* RunViewController.swift in Sources */,
+ 131A3DAF1D906DAB002DAF99 /* Envman.swift in Sources */,
+ 13CA73DE1D84B67200B1A323 /* Project.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C9898A1D8319600028BA2C /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C989931D8319600028BA2C /* BitriseStudioTests.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C989951D8319600028BA2C /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C9899E1D8319600028BA2C /* BitriseStudioUITests.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXTargetDependency section */
+ 13C989901D8319600028BA2C /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = 13C9897C1D83195F0028BA2C /* BitriseStudio */;
+ targetProxy = 13C9898F1D8319600028BA2C /* PBXContainerItemProxy */;
+ };
+ 13C9899B1D8319600028BA2C /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = 13C9897C1D83195F0028BA2C /* BitriseStudio */;
+ targetProxy = 13C9899A1D8319600028BA2C /* PBXContainerItemProxy */;
+ };
+/* End PBXTargetDependency section */
+
+/* Begin PBXVariantGroup section */
+ 13C989861D8319600028BA2C /* Main.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 13C989871D8319600028BA2C /* Base */,
+ );
+ name = Main.storyboard;
+ sourceTree = "<group>";
+ };
+/* End PBXVariantGroup section */
+
+/* Begin XCBuildConfiguration section */
+ 13C989A01D8319600028BA2C /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_SUSPICIOUS_MOVES = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ CODE_SIGN_IDENTITY = "-";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.11;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = macosx;
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
+ SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+ };
+ name = Debug;
+ };
+ 13C989A11D8319600028BA2C /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_SUSPICIOUS_MOVES = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ CODE_SIGN_IDENTITY = "-";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.11;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = macosx;
+ SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+ };
+ name = Release;
+ };
+ 13C989A31D8319600028BA2C /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ CODE_SIGN_IDENTITY = "Mac Developer: Some Dude (KYXQXCWE3G)";
+ COMBINE_HIDPI_IMAGES = YES;
+ DEVELOPMENT_TEAM = 9NS44DLTN7;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Carthage/Build/Mac",
+ "$(PROJECT_DIR)/Framworks",
+ );
+ INFOPLIST_FILE = BitriseStudio/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.godrei.BitriseStudio;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ PROVISIONING_PROFILE = "b17a1b90-9459-4620-9332-347d399f7cd9";
+ PROVISIONING_PROFILE_SPECIFIER = "Mac Development Wildcard";
+ SWIFT_VERSION = 3.0;
+ };
+ name = Debug;
+ };
+ 13C989A41D8319600028BA2C /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ CODE_SIGN_IDENTITY = "3rd Party Mac Developer Application";
+ COMBINE_HIDPI_IMAGES = YES;
+ DEVELOPMENT_TEAM = 9NS44DLTN7;
+ FRAMEWORK_SEARCH_PATHS = (
+ "$(inherited)",
+ "$(PROJECT_DIR)/Carthage/Build/Mac",
+ "$(PROJECT_DIR)/Framworks",
+ );
+ INFOPLIST_FILE = BitriseStudio/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.godrei.BitriseStudio;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ PROVISIONING_PROFILE = "1bb807b8-a953-459e-85ca-c86d3fe13645";
+ PROVISIONING_PROFILE_SPECIFIER = "Mac App-Store Wildcards";
+ SWIFT_VERSION = 3.0;
+ };
+ name = Release;
+ };
+ 13C989A61D8319600028BA2C /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ BUNDLE_LOADER = "$(TEST_HOST)";
+ COMBINE_HIDPI_IMAGES = YES;
+ DEVELOPMENT_TEAM = L935L4GU3F;
+ INFOPLIST_FILE = BitriseStudioTests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = godrei.BitriseStudioTests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_HOST = "$(BUILT_PRODUCTS_DIR)/BitriseStudio.app/Contents/MacOS/BitriseStudio";
+ };
+ name = Debug;
+ };
+ 13C989A71D8319600028BA2C /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ BUNDLE_LOADER = "$(TEST_HOST)";
+ COMBINE_HIDPI_IMAGES = YES;
+ DEVELOPMENT_TEAM = L935L4GU3F;
+ INFOPLIST_FILE = BitriseStudioTests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = godrei.BitriseStudioTests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_HOST = "$(BUILT_PRODUCTS_DIR)/BitriseStudio.app/Contents/MacOS/BitriseStudio";
+ };
+ name = Release;
+ };
+ 13C989A91D8319600028BA2C /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ COMBINE_HIDPI_IMAGES = YES;
+ DEVELOPMENT_TEAM = L935L4GU3F;
+ INFOPLIST_FILE = BitriseStudioUITests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = godrei.BitriseStudioUITests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_TARGET_NAME = BitriseStudio;
+ };
+ name = Debug;
+ };
+ 13C989AA1D8319600028BA2C /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ COMBINE_HIDPI_IMAGES = YES;
+ DEVELOPMENT_TEAM = L935L4GU3F;
+ INFOPLIST_FILE = BitriseStudioUITests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = godrei.BitriseStudioUITests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_TARGET_NAME = BitriseStudio;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 13C989781D83195F0028BA2C /* Build configuration list for PBXProject "BitriseStudio" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C989A01D8319600028BA2C /* Debug */,
+ 13C989A11D8319600028BA2C /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 13C989A21D8319600028BA2C /* Build configuration list for PBXNativeTarget "BitriseStudio" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C989A31D8319600028BA2C /* Debug */,
+ 13C989A41D8319600028BA2C /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 13C989A51D8319600028BA2C /* Build configuration list for PBXNativeTarget "BitriseStudioTests" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C989A61D8319600028BA2C /* Debug */,
+ 13C989A71D8319600028BA2C /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 13C989A81D8319600028BA2C /* Build configuration list for PBXNativeTarget "BitriseStudioUITests" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C989A91D8319600028BA2C /* Debug */,
+ 13C989AA1D8319600028BA2C /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 13C989751D83195F0028BA2C /* Project object */;
+}
+`
+
+const testIOSPbxprojContent = `
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 13C4D5AB1DDDDED300D5DC29 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C4D5AA1DDDDED300D5DC29 /* AppDelegate.swift */; };
+ 13C4D5AD1DDDDED300D5DC29 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C4D5AC1DDDDED300D5DC29 /* ViewController.swift */; };
+ 13C4D5B01DDDDED300D5DC29 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 13C4D5AE1DDDDED300D5DC29 /* Main.storyboard */; };
+ 13C4D5B21DDDDED300D5DC29 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13C4D5B11DDDDED300D5DC29 /* Assets.xcassets */; };
+ 13C4D5B51DDDDED300D5DC29 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 13C4D5B31DDDDED300D5DC29 /* LaunchScreen.storyboard */; };
+ 13C4D5C01DDDDED400D5DC29 /* BitriseFastlaneSampleTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C4D5BF1DDDDED400D5DC29 /* BitriseFastlaneSampleTests.swift */; };
+ 13C4D5CB1DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 13C4D5CA1DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.swift */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXContainerItemProxy section */
+ 13C4D5BC1DDDDED400D5DC29 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 13C4D59F1DDDDED300D5DC29 /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 13C4D5A61DDDDED300D5DC29;
+ remoteInfo = BitriseFastlaneSample;
+ };
+ 13C4D5C71DDDDED400D5DC29 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 13C4D59F1DDDDED300D5DC29 /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 13C4D5A61DDDDED300D5DC29;
+ remoteInfo = BitriseFastlaneSample;
+ };
+/* End PBXContainerItemProxy section */
+
+/* Begin PBXFileReference section */
+ 13C4D5A71DDDDED300D5DC29 /* BitriseFastlaneSample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = BitriseFastlaneSample.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 13C4D5AA1DDDDED300D5DC29 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
+ 13C4D5AC1DDDDED300D5DC29 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
+ 13C4D5AF1DDDDED300D5DC29 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
+ 13C4D5B11DDDDED300D5DC29 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
+ 13C4D5B41DDDDED300D5DC29 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
+ 13C4D5B61DDDDED300D5DC29 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 13C4D5BB1DDDDED400D5DC29 /* BitriseFastlaneSampleTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = BitriseFastlaneSampleTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
+ 13C4D5BF1DDDDED400D5DC29 /* BitriseFastlaneSampleTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BitriseFastlaneSampleTests.swift; sourceTree = "<group>"; };
+ 13C4D5C11DDDDED400D5DC29 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 13C4D5C61DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = BitriseFastlaneSampleUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
+ 13C4D5CA1DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BitriseFastlaneSampleUITests.swift; sourceTree = "<group>"; };
+ 13C4D5CC1DDDDED400D5DC29 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 13C4D5A41DDDDED300D5DC29 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C4D5B81DDDDED400D5DC29 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C4D5C31DDDDED400D5DC29 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 13C4D59E1DDDDED300D5DC29 = {
+ isa = PBXGroup;
+ children = (
+ 13C4D5A91DDDDED300D5DC29 /* BitriseFastlaneSample */,
+ 13C4D5BE1DDDDED400D5DC29 /* BitriseFastlaneSampleTests */,
+ 13C4D5C91DDDDED400D5DC29 /* BitriseFastlaneSampleUITests */,
+ 13C4D5A81DDDDED300D5DC29 /* Products */,
+ );
+ sourceTree = "<group>";
+ };
+ 13C4D5A81DDDDED300D5DC29 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 13C4D5A71DDDDED300D5DC29 /* BitriseFastlaneSample.app */,
+ 13C4D5BB1DDDDED400D5DC29 /* BitriseFastlaneSampleTests.xctest */,
+ 13C4D5C61DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.xctest */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 13C4D5A91DDDDED300D5DC29 /* BitriseFastlaneSample */ = {
+ isa = PBXGroup;
+ children = (
+ 13C4D5AA1DDDDED300D5DC29 /* AppDelegate.swift */,
+ 13C4D5AC1DDDDED300D5DC29 /* ViewController.swift */,
+ 13C4D5AE1DDDDED300D5DC29 /* Main.storyboard */,
+ 13C4D5B11DDDDED300D5DC29 /* Assets.xcassets */,
+ 13C4D5B31DDDDED300D5DC29 /* LaunchScreen.storyboard */,
+ 13C4D5B61DDDDED300D5DC29 /* Info.plist */,
+ );
+ path = BitriseFastlaneSample;
+ sourceTree = "<group>";
+ };
+ 13C4D5BE1DDDDED400D5DC29 /* BitriseFastlaneSampleTests */ = {
+ isa = PBXGroup;
+ children = (
+ 13C4D5BF1DDDDED400D5DC29 /* BitriseFastlaneSampleTests.swift */,
+ 13C4D5C11DDDDED400D5DC29 /* Info.plist */,
+ );
+ path = BitriseFastlaneSampleTests;
+ sourceTree = "<group>";
+ };
+ 13C4D5C91DDDDED400D5DC29 /* BitriseFastlaneSampleUITests */ = {
+ isa = PBXGroup;
+ children = (
+ 13C4D5CA1DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.swift */,
+ 13C4D5CC1DDDDED400D5DC29 /* Info.plist */,
+ );
+ path = BitriseFastlaneSampleUITests;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 13C4D5A61DDDDED300D5DC29 /* BitriseFastlaneSample */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 13C4D5CF1DDDDED400D5DC29 /* Build configuration list for PBXNativeTarget "BitriseFastlaneSample" */;
+ buildPhases = (
+ 13C4D5A31DDDDED300D5DC29 /* Sources */,
+ 13C4D5A41DDDDED300D5DC29 /* Frameworks */,
+ 13C4D5A51DDDDED300D5DC29 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = BitriseFastlaneSample;
+ productName = BitriseFastlaneSample;
+ productReference = 13C4D5A71DDDDED300D5DC29 /* BitriseFastlaneSample.app */;
+ productType = "com.apple.product-type.application";
+ };
+ 13C4D5BA1DDDDED400D5DC29 /* BitriseFastlaneSampleTests */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 13C4D5D21DDDDED400D5DC29 /* Build configuration list for PBXNativeTarget "BitriseFastlaneSampleTests" */;
+ buildPhases = (
+ 13C4D5B71DDDDED400D5DC29 /* Sources */,
+ 13C4D5B81DDDDED400D5DC29 /* Frameworks */,
+ 13C4D5B91DDDDED400D5DC29 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ 13C4D5BD1DDDDED400D5DC29 /* PBXTargetDependency */,
+ );
+ name = BitriseFastlaneSampleTests;
+ productName = BitriseFastlaneSampleTests;
+ productReference = 13C4D5BB1DDDDED400D5DC29 /* BitriseFastlaneSampleTests.xctest */;
+ productType = "com.apple.product-type.bundle.unit-test";
+ };
+ 13C4D5C51DDDDED400D5DC29 /* BitriseFastlaneSampleUITests */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 13C4D5D51DDDDED400D5DC29 /* Build configuration list for PBXNativeTarget "BitriseFastlaneSampleUITests" */;
+ buildPhases = (
+ 13C4D5C21DDDDED400D5DC29 /* Sources */,
+ 13C4D5C31DDDDED400D5DC29 /* Frameworks */,
+ 13C4D5C41DDDDED400D5DC29 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ 13C4D5C81DDDDED400D5DC29 /* PBXTargetDependency */,
+ );
+ name = BitriseFastlaneSampleUITests;
+ productName = BitriseFastlaneSampleUITests;
+ productReference = 13C4D5C61DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.xctest */;
+ productType = "com.apple.product-type.bundle.ui-testing";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 13C4D59F1DDDDED300D5DC29 /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastSwiftUpdateCheck = 0810;
+ LastUpgradeCheck = 0810;
+ ORGANIZATIONNAME = "Krisztian Goedrei";
+ TargetAttributes = {
+ 13C4D5A61DDDDED300D5DC29 = {
+ CreatedOnToolsVersion = 8.1;
+ DevelopmentTeam = 9NS44DLTN7;
+ ProvisioningStyle = Manual;
+ };
+ 13C4D5BA1DDDDED400D5DC29 = {
+ CreatedOnToolsVersion = 8.1;
+ DevelopmentTeam = 72SA8V3WYL;
+ ProvisioningStyle = Automatic;
+ TestTargetID = 13C4D5A61DDDDED300D5DC29;
+ };
+ 13C4D5C51DDDDED400D5DC29 = {
+ CreatedOnToolsVersion = 8.1;
+ DevelopmentTeam = 72SA8V3WYL;
+ ProvisioningStyle = Automatic;
+ TestTargetID = 13C4D5A61DDDDED300D5DC29;
+ };
+ };
+ };
+ buildConfigurationList = 13C4D5A21DDDDED300D5DC29 /* Build configuration list for PBXProject "BitriseFastlaneSample" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 13C4D59E1DDDDED300D5DC29;
+ productRefGroup = 13C4D5A81DDDDED300D5DC29 /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 13C4D5A61DDDDED300D5DC29 /* BitriseFastlaneSample */,
+ 13C4D5BA1DDDDED400D5DC29 /* BitriseFastlaneSampleTests */,
+ 13C4D5C51DDDDED400D5DC29 /* BitriseFastlaneSampleUITests */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 13C4D5A51DDDDED300D5DC29 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C4D5B51DDDDED300D5DC29 /* LaunchScreen.storyboard in Resources */,
+ 13C4D5B21DDDDED300D5DC29 /* Assets.xcassets in Resources */,
+ 13C4D5B01DDDDED300D5DC29 /* Main.storyboard in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C4D5B91DDDDED400D5DC29 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C4D5C41DDDDED400D5DC29 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 13C4D5A31DDDDED300D5DC29 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C4D5AD1DDDDED300D5DC29 /* ViewController.swift in Sources */,
+ 13C4D5AB1DDDDED300D5DC29 /* AppDelegate.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C4D5B71DDDDED400D5DC29 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C4D5C01DDDDED400D5DC29 /* BitriseFastlaneSampleTests.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 13C4D5C21DDDDED400D5DC29 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 13C4D5CB1DDDDED400D5DC29 /* BitriseFastlaneSampleUITests.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXTargetDependency section */
+ 13C4D5BD1DDDDED400D5DC29 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = 13C4D5A61DDDDED300D5DC29 /* BitriseFastlaneSample */;
+ targetProxy = 13C4D5BC1DDDDED400D5DC29 /* PBXContainerItemProxy */;
+ };
+ 13C4D5C81DDDDED400D5DC29 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ target = 13C4D5A61DDDDED300D5DC29 /* BitriseFastlaneSample */;
+ targetProxy = 13C4D5C71DDDDED400D5DC29 /* PBXContainerItemProxy */;
+ };
+/* End PBXTargetDependency section */
+
+/* Begin PBXVariantGroup section */
+ 13C4D5AE1DDDDED300D5DC29 /* Main.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 13C4D5AF1DDDDED300D5DC29 /* Base */,
+ );
+ name = Main.storyboard;
+ sourceTree = "<group>";
+ };
+ 13C4D5B31DDDDED300D5DC29 /* LaunchScreen.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 13C4D5B41DDDDED300D5DC29 /* Base */,
+ );
+ name = LaunchScreen.storyboard;
+ sourceTree = "<group>";
+ };
+/* End PBXVariantGroup section */
+
+/* Begin XCBuildConfiguration section */
+ 13C4D5CD1DDDDED400D5DC29 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVES = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 10.1;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = iphoneos;
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
+ SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ 13C4D5CE1DDDDED400D5DC29 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVES = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 10.1;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = iphoneos;
+ SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+ 13C4D5D01DDDDED400D5DC29 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Distribution";
+ DEVELOPMENT_TEAM = 9NS44DLTN7;
+ INFOPLIST_FILE = BitriseFastlaneSample/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.bitrise.BitriseFastlaneSample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ PROVISIONING_PROFILE = "8e4701a8-01fb-4467-aad7-5a6c541795f0";
+ PROVISIONING_PROFILE_SPECIFIER = "match AppStore com.bitrise.BitriseFastlaneSample";
+ SWIFT_VERSION = 3.0;
+ };
+ name = Debug;
+ };
+ 13C4D5D11DDDDED400D5DC29 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Distribution";
+ DEVELOPMENT_TEAM = 9NS44DLTN7;
+ INFOPLIST_FILE = BitriseFastlaneSample/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.bitrise.BitriseFastlaneSample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ PROVISIONING_PROFILE = "8e4701a8-01fb-4467-aad7-5a6c541795f0";
+ PROVISIONING_PROFILE_SPECIFIER = "match AppStore com.bitrise.BitriseFastlaneSample";
+ SWIFT_VERSION = 3.0;
+ };
+ name = Release;
+ };
+ 13C4D5D31DDDDED400D5DC29 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ BUNDLE_LOADER = "$(TEST_HOST)";
+ DEVELOPMENT_TEAM = 72SA8V3WYL;
+ INFOPLIST_FILE = BitriseFastlaneSampleTests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.bitrise.BitriseFastlaneSampleTests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_HOST = "$(BUILT_PRODUCTS_DIR)/BitriseFastlaneSample.app/BitriseFastlaneSample";
+ };
+ name = Debug;
+ };
+ 13C4D5D41DDDDED400D5DC29 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ BUNDLE_LOADER = "$(TEST_HOST)";
+ DEVELOPMENT_TEAM = 72SA8V3WYL;
+ INFOPLIST_FILE = BitriseFastlaneSampleTests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.bitrise.BitriseFastlaneSampleTests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_HOST = "$(BUILT_PRODUCTS_DIR)/BitriseFastlaneSample.app/BitriseFastlaneSample";
+ };
+ name = Release;
+ };
+ 13C4D5D61DDDDED400D5DC29 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ DEVELOPMENT_TEAM = 72SA8V3WYL;
+ INFOPLIST_FILE = BitriseFastlaneSampleUITests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.bitrise.BitriseFastlaneSampleUITests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_TARGET_NAME = BitriseFastlaneSample;
+ };
+ name = Debug;
+ };
+ 13C4D5D71DDDDED400D5DC29 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
+ DEVELOPMENT_TEAM = 72SA8V3WYL;
+ INFOPLIST_FILE = BitriseFastlaneSampleUITests/Info.plist;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = com.bitrise.BitriseFastlaneSampleUITests;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_VERSION = 3.0;
+ TEST_TARGET_NAME = BitriseFastlaneSample;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 13C4D5A21DDDDED300D5DC29 /* Build configuration list for PBXProject "BitriseFastlaneSample" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C4D5CD1DDDDED400D5DC29 /* Debug */,
+ 13C4D5CE1DDDDED400D5DC29 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 13C4D5CF1DDDDED400D5DC29 /* Build configuration list for PBXNativeTarget "BitriseFastlaneSample" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C4D5D01DDDDED400D5DC29 /* Debug */,
+ 13C4D5D11DDDDED400D5DC29 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 13C4D5D21DDDDED400D5DC29 /* Build configuration list for PBXNativeTarget "BitriseFastlaneSampleTests" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C4D5D31DDDDED400D5DC29 /* Debug */,
+ 13C4D5D41DDDDED400D5DC29 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 13C4D5D51DDDDED400D5DC29 /* Build configuration list for PBXNativeTarget "BitriseFastlaneSampleUITests" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 13C4D5D61DDDDED400D5DC29 /* Debug */,
+ 13C4D5D71DDDDED400D5DC29 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 13C4D59F1DDDDED300D5DC29 /* Project object */;
+}
+`
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcscheme.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcscheme.go
new file mode 100644
index 00000000..ee944229
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/bitrise-tools/go-xcode/xcodeproj/xcscheme.go
@@ -0,0 +1,244 @@
+package xcodeproj
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/bitrise-io/go-utils/fileutil"
+ "github.com/bitrise-io/go-utils/pathutil"
+)
+
+// SchemeModel ...
+type SchemeModel struct {
+ Name string
+ HasXCTest bool
+}
+
+func filterSharedSchemeFilePaths(paths []string) []string {
+ isSharedSchemeFilePath := func(pth string) bool {
+ regexpPattern := filepath.Join(".*[/]?xcshareddata", "xcschemes", ".+[.]xcscheme")
+ regexp := regexp.MustCompile(regexpPattern)
+ return (regexp.FindString(pth) != "")
+ }
+
+ filteredPaths := []string{}
+ for _, pth := range paths {
+ if isSharedSchemeFilePath(pth) {
+ filteredPaths = append(filteredPaths, pth)
+ }
+ }
+
+ sort.Strings(filteredPaths)
+
+ return filteredPaths
+}
+
+func sharedSchemeFilePaths(projectOrWorkspacePth string) ([]string, error) {
+ filesInDir := func(dir string) ([]string, error) {
+ files := []string{}
+ if err := filepath.Walk(dir, func(path string, f os.FileInfo, err error) error {
+ files = append(files, path)
+ return nil
+ }); err != nil {
+ return []string{}, err
+ }
+ return files, nil
+ }
+
+ paths, err := filesInDir(projectOrWorkspacePth)
+ if err != nil {
+ return []string{}, err
+ }
+ return filterSharedSchemeFilePaths(paths), nil
+}
+
+// SchemeNameFromPath ...
+func SchemeNameFromPath(schemePth string) string {
+ basename := filepath.Base(schemePth)
+ ext := filepath.Ext(schemePth)
+ if ext != XCSchemeExt {
+ return ""
+ }
+ return strings.TrimSuffix(basename, ext)
+}
+
+func schemeFileContentContainsXCTestBuildAction(schemeFileContent string) (bool, error) {
+ testActionStartPattern := ".+)"`)
+ testableReferenceEndPattern := ""
+ isTestableReference := false
+
+ xctestBuildableReferenceNameRegexp := regexp.MustCompile(`BuildableName = ".+.xctest"`)
+
+ scanner := bufio.NewScanner(strings.NewReader(schemeFileContent))
+ for scanner.Scan() {
+ line := scanner.Text()
+
+ if strings.TrimSpace(line) == testActionEndPattern {
+ break
+ }
+
+ if strings.TrimSpace(line) == testActionStartPattern {
+ isTestableAction = true
+ continue
+ }
+
+ if !isTestableAction {
+ continue
+ }
+
+ // TestAction
+
+ if strings.TrimSpace(line) == testableReferenceEndPattern {
+ isTestableReference = false
+ continue
+ }
+
+ if strings.TrimSpace(line) == testableReferenceStartPattern {
+ isTestableReference = true
+ continue
+ }
+
+ if !isTestableReference {
+ continue
+ }
+
+ // TestableReference
+
+ if matches := testableReferenceSkippedRegexp.FindStringSubmatch(line); len(matches) > 1 {
+ skipped := matches[1]
+ if skipped != "NO" {
+ break
+ }
+ }
+
+ if match := xctestBuildableReferenceNameRegexp.FindString(line); match != "" {
+ return true, nil
+ }
+ }
+
+ if err := scanner.Err(); err != nil {
+ return false, err
+ }
+
+ return false, nil
+}
+
+// SchemeFileContainsXCTestBuildAction ...
+func SchemeFileContainsXCTestBuildAction(schemeFilePth string) (bool, error) {
+ content, err := fileutil.ReadStringFromFile(schemeFilePth)
+ if err != nil {
+ return false, err
+ }
+
+ return schemeFileContentContainsXCTestBuildAction(content)
+}
+
+func sharedSchemes(projectOrWorkspacePth string) ([]SchemeModel, error) {
+ schemePaths, err := sharedSchemeFilePaths(projectOrWorkspacePth)
+ if err != nil {
+ return []SchemeModel{}, err
+ }
+
+ schemes := []SchemeModel{}
+ for _, schemePth := range schemePaths {
+ schemeName := SchemeNameFromPath(schemePth)
+
+ hasXCTest, err := SchemeFileContainsXCTestBuildAction(schemePth)
+ if err != nil {
+ return []SchemeModel{}, err
+ }
+
+ schemes = append(schemes, SchemeModel{
+ Name: schemeName,
+ HasXCTest: hasXCTest,
+ })
+ }
+
+ return schemes, nil
+}
+
+// ProjectSharedSchemes ...
+func ProjectSharedSchemes(projectPth string) ([]SchemeModel, error) {
+ return sharedSchemes(projectPth)
+}
+
+// WorkspaceProjectReferences ...
+func WorkspaceProjectReferences(workspace string) ([]string, error) {
+ projects := []string{}
+
+ workspaceDir := filepath.Dir(workspace)
+
+ xcworkspacedataPth := path.Join(workspace, "contents.xcworkspacedata")
+ if exist, err := pathutil.IsPathExists(xcworkspacedataPth); err != nil {
+ return []string{}, err
+ } else if !exist {
+ return []string{}, fmt.Errorf("contents.xcworkspacedata does not exist at: %s", xcworkspacedataPth)
+ }
+
+ xcworkspacedataStr, err := fileutil.ReadStringFromFile(xcworkspacedataPth)
+ if err != nil {
+ return []string{}, err
+ }
+
+ xcworkspacedataLines := strings.Split(xcworkspacedataStr, "\n")
+ fileRefStart := false
+ regexp := regexp.MustCompile(`location = "(.+):(.+).xcodeproj"`)
+
+ for _, line := range xcworkspacedataLines {
+ if strings.Contains(line, "[A-Z0-9]+) /\* (?P.*) \*/ = {`)
+ endPBXNativeTargetPattern := `};`
+ isPBXNativeTarget := false
+
+ // isa = PBXNativeTarget;
+ isaRegexp := regexp.MustCompile(`\s*isa = (?P.*);`)
+
+ beginDependenciesPattern := `dependencies = (`
+ dependencieRegexp := regexp.MustCompile(`\s*(?P[A-Z0-9]+) /\* (?P.*) \*/,`)
+ endDependenciesPattern := `);`
+ isDependencies := false
+
+ // name = SampleAppWithCocoapods;
+ nameRegexp := regexp.MustCompile(`\s*name = (?P.*);`)
+ // productReference = BAAFFEED19EE788800F3AC91 /* SampleAppWithCocoapodsTests.xctest */;
+ productReferenceRegexp := regexp.MustCompile(`\s*productReference = (?P[A-Z0-9]+) /\* (?P.*) \*/;`)
+ // productType = "com.apple.product-type.bundle.unit-test";
+ productTypeRegexp := regexp.MustCompile(`\s*productType = (?P.*);`)
+
+ scanner := bufio.NewScanner(strings.NewReader(pbxprojContent))
+ for scanner.Scan() {
+ line := scanner.Text()
+
+ if strings.TrimSpace(line) == endPBXNativeTargetSectionPattern {
+ break
+ }
+
+ if strings.TrimSpace(line) == beginPBXNativeTargetSectionPattern {
+ isPBXNativeTargetSection = true
+ continue
+ }
+
+ if !isPBXNativeTargetSection {
+ continue
+ }
+
+ // PBXNativeTarget section
+
+ if strings.TrimSpace(line) == endPBXNativeTargetPattern {
+ pbxNativeTarget := PBXNativeTarget{
+ id: id,
+ isa: isa,
+ dependencies: dependencies,
+ name: name,
+ productPath: productPath,
+ productType: productType,
+ }
+ pbxNativeTargets = append(pbxNativeTargets, pbxNativeTarget)
+
+ id = ""
+ isa = ""
+ name = ""
+ productPath = ""
+ productType = ""
+ dependencies = []string{}
+
+ isPBXNativeTarget = false
+ continue
+ }
+
+ if matches := beginPBXNativeTargetRegexp.FindStringSubmatch(line); len(matches) == 3 {
+ id = matches[1]
+ name = matches[2]
+
+ isPBXNativeTarget = true
+ continue
+ }
+
+ if !isPBXNativeTarget {
+ continue
+ }
+
+ // PBXNativeTarget item
+
+ if matches := isaRegexp.FindStringSubmatch(line); len(matches) == 2 {
+ isa = strings.Trim(matches[1], `"`)
+ }
+
+ if matches := nameRegexp.FindStringSubmatch(line); len(matches) == 2 {
+ name = strings.Trim(matches[1], `"`)
+ }
+
+ if matches := productTypeRegexp.FindStringSubmatch(line); len(matches) == 2 {
+ productType = strings.Trim(matches[1], `"`)
+ }
+
+ if matches := productReferenceRegexp.FindStringSubmatch(line); len(matches) == 3 {
+ // productId := strings.Trim(matches[1], `"`)
+ productPath = strings.Trim(matches[2], `"`)
+ }
+
+ if isDependencies && strings.TrimSpace(line) == endDependenciesPattern {
+ isDependencies = false
+ continue
+ }
+
+ if strings.TrimSpace(line) == beginDependenciesPattern {
+ isDependencies = true
+ continue
+ }
+
+ if !isDependencies {
+ continue
+ }
+
+ // dependencies
+ if matches := dependencieRegexp.FindStringSubmatch(line); len(matches) == 3 {
+ dependencieID := strings.Trim(matches[1], `"`)
+ dependencieIsa := strings.Trim(matches[2], `"`)
+
+ if dependencieIsa == "PBXTargetDependency" {
+ dependencies = append(dependencies, dependencieID)
+ }
+ }
+ }
+
+ if err := scanner.Err(); err != nil {
+ return []PBXNativeTarget{}, err
+ }
+
+ return pbxNativeTargets, nil
+}
+
+func parsePBXTargetDependencies(pbxprojContent string) ([]PBXTargetDependency, error) {
+ pbxTargetDependencies := []PBXTargetDependency{}
+
+ id := ""
+ isa := ""
+ target := ""
+
+ beginPBXTargetDependencySectionPattern := `/* Begin PBXTargetDependency section */`
+ endPBXTargetDependencySectionPattern := `/* End PBXTargetDependency section */`
+ isPBXTargetDependencySection := false
+
+ // BAAFFEEF19EE788800F3AC91 /* PBXTargetDependency */ = {
+ beginPBXTargetDependencyRegexp := regexp.MustCompile(`\s*(?P[A-Z0-9]+) /\* (?P.*) \*/ = {`)
+ endPBXTargetDependencyPattern := `};`
+ isPBXTargetDependency := false
+
+ // isa = PBXTargetDependency;
+ isaRegexp := regexp.MustCompile(`\s*isa = (?P.*);`)
+ // target = BAAFFED019EE788800F3AC91 /* SampleAppWithCocoapods */;
+ targetRegexp := regexp.MustCompile(`\s*target = (?P[A-Z0-9]+) /\* (?P.*) \*/;`)
+
+ scanner := bufio.NewScanner(strings.NewReader(pbxprojContent))
+ for scanner.Scan() {
+ line := scanner.Text()
+
+ if strings.TrimSpace(line) == endPBXTargetDependencySectionPattern {
+ break
+ }
+
+ if strings.TrimSpace(line) == beginPBXTargetDependencySectionPattern {
+ isPBXTargetDependencySection = true
+ continue
+ }
+
+ if !isPBXTargetDependencySection {
+ continue
+ }
+
+ // PBXTargetDependency section
+
+ if strings.TrimSpace(line) == endPBXTargetDependencyPattern {
+ pbxTargetDependency := PBXTargetDependency{
+ id: id,
+ isa: isa,
+ target: target,
+ }
+ pbxTargetDependencies = append(pbxTargetDependencies, pbxTargetDependency)
+
+ id = ""
+ isa = ""
+ target = ""
+
+ isPBXTargetDependency = false
+ continue
+ }
+
+ if matches := beginPBXTargetDependencyRegexp.FindStringSubmatch(line); len(matches) == 3 {
+ id = matches[1]
+ isa = matches[2]
+
+ isPBXTargetDependency = true
+ continue
+ }
+
+ if !isPBXTargetDependency {
+ continue
+ }
+
+ // PBXTargetDependency item
+
+ if matches := isaRegexp.FindStringSubmatch(line); len(matches) == 2 {
+ isa = strings.Trim(matches[1], `"`)
+ }
+
+ if matches := targetRegexp.FindStringSubmatch(line); len(matches) == 3 {
+ targetID := strings.Trim(matches[1], `"`)
+ // targetName := strings.Trim(matches[2], `"`)
+
+ target = targetID
+ }
+ }
+
+ return pbxTargetDependencies, nil
+}
+
+func targetDependencieWithID(dependencies []PBXTargetDependency, id string) (PBXTargetDependency, bool) {
+ for _, dependencie := range dependencies {
+ if dependencie.id == id {
+ return dependencie, true
+ }
+ }
+ return PBXTargetDependency{}, false
+}
+
+func targetWithID(targets []PBXNativeTarget, id string) (PBXNativeTarget, bool) {
+ for _, target := range targets {
+ if target.id == id {
+ return target, true
+ }
+ }
+ return PBXNativeTarget{}, false
+}
+
+func pbxprojContentTartgets(pbxprojContent string) ([]TargetModel, error) {
+ targetMap := map[string]TargetModel{}
+
+ nativeTargets, err := parsePBXNativeTargets(pbxprojContent)
+ if err != nil {
+ return []TargetModel{}, err
+ }
+
+ targetDependencies, err := parsePBXTargetDependencies(pbxprojContent)
+ if err != nil {
+ return []TargetModel{}, err
+ }
+
+ // Add targets which has test targets
+ for _, target := range nativeTargets {
+ if path.Ext(target.productPath) == ".xctest" {
+ if len(target.dependencies) > 0 {
+ for _, dependencieID := range target.dependencies {
+ dependency, found := targetDependencieWithID(targetDependencies, dependencieID)
+ if found {
+ dependentTarget, found := targetWithID(nativeTargets, dependency.target)
+ if found {
+ targetMap[dependentTarget.name] = TargetModel{
+ Name: dependentTarget.name,
+ HasXCTest: true,
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Add targets which has NO test targets
+ for _, target := range nativeTargets {
+ if path.Ext(target.productPath) != ".xctest" {
+ _, found := targetMap[target.name]
+ if !found {
+ targetMap[target.name] = TargetModel{
+ Name: target.name,
+ HasXCTest: false,
+ }
+ }
+ }
+ }
+
+ targets := []TargetModel{}
+ for _, target := range targetMap {
+ targets = append(targets, target)
+ }
+
+ return targets, nil
+}
+
+// ProjectTargets ...
+func ProjectTargets(projectPth string) ([]TargetModel, error) {
+ pbxProjPth := filepath.Join(projectPth, "project.pbxproj")
+ if exist, err := pathutil.IsPathExists(pbxProjPth); err != nil {
+ return []TargetModel{}, err
+ } else if !exist {
+ return []TargetModel{}, fmt.Errorf("project.pbxproj does not exist at: %s", pbxProjPth)
+ }
+
+ content, err := fileutil.ReadStringFromFile(pbxProjPth)
+ if err != nil {
+ return []TargetModel{}, err
+ }
+
+ return pbxprojContentTartgets(content)
+}
+
+// WorkspaceTargets ...
+func WorkspaceTargets(workspacePth string) ([]TargetModel, error) {
+ projects, err := WorkspaceProjectReferences(workspacePth)
+ if err != nil {
+ return []TargetModel{}, err
+ }
+
+ targets := []TargetModel{}
+ for _, project := range projects {
+ projectTargets, err := ProjectTargets(project)
+ if err != nil {
+ return []TargetModel{}, err
+ }
+
+ targets = append(targets, projectTargets...)
+ }
+
+ return targets, nil
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/LICENSE b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/LICENSE
index bb673323..c8364161 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/LICENSE
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/LICENSE
@@ -1,6 +1,6 @@
ISC License
-Copyright (c) 2012-2013 Dave Collins
+Copyright (c) 2012-2016 Dave Collins
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypass.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypass.go
index d42a0bc4..8a4a6589 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypass.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypass.go
@@ -1,4 +1,4 @@
-// Copyright (c) 2015 Dave Collins
+// Copyright (c) 2015-2016 Dave Collins
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
index e47a4e79..1fe3cf3d 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
@@ -1,4 +1,4 @@
-// Copyright (c) 2015 Dave Collins
+// Copyright (c) 2015-2016 Dave Collins
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/common.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/common.go
index 14f02dc1..7c519ff4 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/common.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/common.go
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 Dave Collins
+ * Copyright (c) 2013-2016 Dave Collins
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/config.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/config.go
index 55528272..2e3d22f3 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/config.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/config.go
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 Dave Collins
+ * Copyright (c) 2013-2016 Dave Collins
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
@@ -67,6 +67,15 @@ type ConfigState struct {
// Google App Engine or with the "safe" build tag specified.
DisablePointerMethods bool
+ // DisablePointerAddresses specifies whether to disable the printing of
+ // pointer addresses. This is useful when diffing data structures in tests.
+ DisablePointerAddresses bool
+
+ // DisableCapacities specifies whether to disable the printing of capacities
+ // for arrays, slices, maps and channels. This is useful when diffing
+ // data structures in tests.
+ DisableCapacities bool
+
// ContinueOnMethod specifies whether or not recursion should continue once
// a custom error or Stringer interface is invoked. The default, false,
// means it will print the results of invoking the custom error or Stringer
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/doc.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/doc.go
index 5be0c406..aacaac6f 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/doc.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/doc.go
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 Dave Collins
+ * Copyright (c) 2013-2016 Dave Collins
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
@@ -91,6 +91,15 @@ The following configuration options are available:
which only accept pointer receivers from non-pointer variables.
Pointer method invocation is enabled by default.
+ * DisablePointerAddresses
+ DisablePointerAddresses specifies whether to disable the printing of
+ pointer addresses. This is useful when diffing data structures in tests.
+
+ * DisableCapacities
+ DisableCapacities specifies whether to disable the printing of
+ capacities for arrays, slices, maps and channels. This is useful when
+ diffing data structures in tests.
+
* ContinueOnMethod
Enables recursion into types after invoking error and Stringer interface
methods. Recursion after method invocation is disabled by default.
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/dump.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/dump.go
index a0ff95e2..df1d582a 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/dump.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/dump.go
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 Dave Collins
+ * Copyright (c) 2013-2016 Dave Collins
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
@@ -129,7 +129,7 @@ func (d *dumpState) dumpPtr(v reflect.Value) {
d.w.Write(closeParenBytes)
// Display pointer information.
- if len(pointerChain) > 0 {
+ if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 {
d.w.Write(openParenBytes)
for i, addr := range pointerChain {
if i > 0 {
@@ -282,13 +282,13 @@ func (d *dumpState) dump(v reflect.Value) {
case reflect.Map, reflect.String:
valueLen = v.Len()
}
- if valueLen != 0 || valueCap != 0 {
+ if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 {
d.w.Write(openParenBytes)
if valueLen != 0 {
d.w.Write(lenEqualsBytes)
printInt(d.w, int64(valueLen), 10)
}
- if valueCap != 0 {
+ if !d.cs.DisableCapacities && valueCap != 0 {
if valueLen != 0 {
d.w.Write(spaceBytes)
}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/format.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/format.go
index ecf3b80e..c49875ba 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/format.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/format.go
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 Dave Collins
+ * Copyright (c) 2013-2016 Dave Collins
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/spew.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/spew.go
index d8233f54..32c0e338 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/spew.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/davecgh/go-spew/spew/spew.go
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 Dave Collins
+ * Copyright (c) 2013-2016 Dave Collins
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/stretchr/testify/assert/assertions.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/stretchr/testify/assert/assertions.go
index 835084ff..2feb4193 100644
--- a/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/stretchr/testify/assert/assertions.go
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/github.com/stretchr/testify/assert/assertions.go
@@ -18,10 +18,6 @@ import (
"github.com/pmezard/go-difflib/difflib"
)
-func init() {
- spew.Config.SortKeys = true
-}
-
// TestingT is an interface wrapper around *testing.T
type TestingT interface {
Errorf(format string, args ...interface{})
@@ -1043,8 +1039,8 @@ func diff(expected interface{}, actual interface{}) string {
return ""
}
- e := spew.Sdump(expected)
- a := spew.Sdump(actual)
+ e := spewConfig.Sdump(expected)
+ a := spewConfig.Sdump(actual)
diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{
A: difflib.SplitLines(e),
@@ -1058,3 +1054,10 @@ func diff(expected interface{}, actual interface{}) string {
return "\n\nDiff:\n" + diff
}
+
+var spewConfig = spew.ConfigState{
+ Indent: " ",
+ DisablePointerAddresses: true,
+ DisableCapacities: true,
+ SortKeys: true,
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/golang.org/x/sys/AUTHORS b/go/src/github.com/bitrise-core/bitrise-init/vendor/golang.org/x/sys/AUTHORS
new file mode 100644
index 00000000..15167cd7
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/golang.org/x/sys/AUTHORS
@@ -0,0 +1,3 @@
+# This source code refers to The Go Authors for copyright purposes.
+# The master list of authors is in the main Go distribution,
+# visible at http://tip.golang.org/AUTHORS.
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/golang.org/x/sys/CONTRIBUTORS b/go/src/github.com/bitrise-core/bitrise-init/vendor/golang.org/x/sys/CONTRIBUTORS
new file mode 100644
index 00000000..1c4577e9
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/golang.org/x/sys/CONTRIBUTORS
@@ -0,0 +1,3 @@
+# This source code was written by the Go contributors.
+# The master list of contributors is in the main Go distribution,
+# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/LICENSE b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/LICENSE
new file mode 100644
index 00000000..a68e67f0
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/LICENSE
@@ -0,0 +1,188 @@
+
+Copyright (c) 2011-2014 - Canonical Inc.
+
+This software is licensed under the LGPLv3, included below.
+
+As a special exception to the GNU Lesser General Public License version 3
+("LGPL3"), the copyright holders of this Library give you permission to
+convey to a third party a Combined Work that links statically or dynamically
+to this Library without providing any Minimal Corresponding Source or
+Minimal Application Code as set out in 4d or providing the installation
+information set out in section 4e, provided that you comply with the other
+provisions of LGPL3 and provided that you meet, for the Application the
+terms and conditions of the license(s) which apply to the Application.
+
+Except as stated in this special exception, the provisions of LGPL3 will
+continue to comply in full to this Library. If you modify this Library, you
+may apply this exception to your version of this Library, but you are not
+obliged to do so. If you do not wish to do so, delete this exception
+statement from your version. This exception does not (and cannot) modify any
+license terms which apply to the Application, with which you must still
+comply.
+
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/LICENSE.libyaml b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/LICENSE.libyaml
new file mode 100644
index 00000000..8da58fbf
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/LICENSE.libyaml
@@ -0,0 +1,31 @@
+The following files were ported to Go from C files of libyaml, and thus
+are still covered by their original copyright and license:
+
+ apic.go
+ emitterc.go
+ parserc.go
+ readerc.go
+ scannerc.go
+ writerc.go
+ yamlh.go
+ yamlprivateh.go
+
+Copyright (c) 2006 Kirill Simonov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/README.md b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/README.md
new file mode 100644
index 00000000..af070566
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/README.md
@@ -0,0 +1,128 @@
+# YAML support for the Go language
+
+Introduction
+------------
+
+The yaml package enables Go programs to comfortably encode and decode YAML
+values. It was developed within [Canonical](https://www.canonical.com) as
+part of the [juju](https://juju.ubuntu.com) project, and is based on a
+pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
+C library to parse and generate YAML data quickly and reliably.
+
+Compatibility
+-------------
+
+The yaml package supports most of YAML 1.1 and 1.2, including support for
+anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
+implemented, and base-60 floats from YAML 1.1 are purposefully not
+supported since they're a poor design and are gone in YAML 1.2.
+
+Installation and usage
+----------------------
+
+The import path for the package is *gopkg.in/yaml.v1*.
+
+To install it, run:
+
+ go get gopkg.in/yaml.v1
+
+API documentation
+-----------------
+
+If opened in a browser, the import path itself leads to the API documentation:
+
+ * [https://gopkg.in/yaml.v1](https://gopkg.in/yaml.v1)
+
+API stability
+-------------
+
+The package API for yaml v1 will remain stable as described in [gopkg.in](https://gopkg.in).
+
+
+License
+-------
+
+The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details.
+
+
+Example
+-------
+
+```Go
+package main
+
+import (
+ "fmt"
+ "log"
+
+ "gopkg.in/yaml.v1"
+)
+
+var data = `
+a: Easy!
+b:
+ c: 2
+ d: [3, 4]
+`
+
+type T struct {
+ A string
+ B struct{C int; D []int ",flow"}
+}
+
+func main() {
+ t := T{}
+
+ err := yaml.Unmarshal([]byte(data), &t)
+ if err != nil {
+ log.Fatalf("error: %v", err)
+ }
+ fmt.Printf("--- t:\n%v\n\n", t)
+
+ d, err := yaml.Marshal(&t)
+ if err != nil {
+ log.Fatalf("error: %v", err)
+ }
+ fmt.Printf("--- t dump:\n%s\n\n", string(d))
+
+ m := make(map[interface{}]interface{})
+
+ err = yaml.Unmarshal([]byte(data), &m)
+ if err != nil {
+ log.Fatalf("error: %v", err)
+ }
+ fmt.Printf("--- m:\n%v\n\n", m)
+
+ d, err = yaml.Marshal(&m)
+ if err != nil {
+ log.Fatalf("error: %v", err)
+ }
+ fmt.Printf("--- m dump:\n%s\n\n", string(d))
+}
+```
+
+This example will generate the following output:
+
+```
+--- t:
+{Easy! {2 [3 4]}}
+
+--- t dump:
+a: Easy!
+b:
+ c: 2
+ d: [3, 4]
+
+
+--- m:
+map[a:Easy! b:map[c:2 d:[3 4]]]
+
+--- m dump:
+a: Easy!
+b:
+ c: 2
+ d:
+ - 3
+ - 4
+```
+
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/apic.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/apic.go
new file mode 100644
index 00000000..95ec014e
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/apic.go
@@ -0,0 +1,742 @@
+package yaml
+
+import (
+ "io"
+ "os"
+)
+
+func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
+ //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
+
+ // Check if we can move the queue at the beginning of the buffer.
+ if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
+ if parser.tokens_head != len(parser.tokens) {
+ copy(parser.tokens, parser.tokens[parser.tokens_head:])
+ }
+ parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
+ parser.tokens_head = 0
+ }
+ parser.tokens = append(parser.tokens, *token)
+ if pos < 0 {
+ return
+ }
+ copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
+ parser.tokens[parser.tokens_head+pos] = *token
+}
+
+// Create a new parser object.
+func yaml_parser_initialize(parser *yaml_parser_t) bool {
+ *parser = yaml_parser_t{
+ raw_buffer: make([]byte, 0, input_raw_buffer_size),
+ buffer: make([]byte, 0, input_buffer_size),
+ }
+ return true
+}
+
+// Destroy a parser object.
+func yaml_parser_delete(parser *yaml_parser_t) {
+ *parser = yaml_parser_t{}
+}
+
+// String read handler.
+func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
+ if parser.input_pos == len(parser.input) {
+ return 0, io.EOF
+ }
+ n = copy(buffer, parser.input[parser.input_pos:])
+ parser.input_pos += n
+ return n, nil
+}
+
+// File read handler.
+func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
+ return parser.input_file.Read(buffer)
+}
+
+// Set a string input.
+func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
+ if parser.read_handler != nil {
+ panic("must set the input source only once")
+ }
+ parser.read_handler = yaml_string_read_handler
+ parser.input = input
+ parser.input_pos = 0
+}
+
+// Set a file input.
+func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
+ if parser.read_handler != nil {
+ panic("must set the input source only once")
+ }
+ parser.read_handler = yaml_file_read_handler
+ parser.input_file = file
+}
+
+// Set the source encoding.
+func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
+ if parser.encoding != yaml_ANY_ENCODING {
+ panic("must set the encoding only once")
+ }
+ parser.encoding = encoding
+}
+
+// Create a new emitter object.
+func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
+ *emitter = yaml_emitter_t{
+ buffer: make([]byte, output_buffer_size),
+ raw_buffer: make([]byte, 0, output_raw_buffer_size),
+ states: make([]yaml_emitter_state_t, 0, initial_stack_size),
+ events: make([]yaml_event_t, 0, initial_queue_size),
+ }
+ return true
+}
+
+// Destroy an emitter object.
+func yaml_emitter_delete(emitter *yaml_emitter_t) {
+ *emitter = yaml_emitter_t{}
+}
+
+// String write handler.
+func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
+ *emitter.output_buffer = append(*emitter.output_buffer, buffer...)
+ return nil
+}
+
+// File write handler.
+func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
+ _, err := emitter.output_file.Write(buffer)
+ return err
+}
+
+// Set a string output.
+func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
+ if emitter.write_handler != nil {
+ panic("must set the output target only once")
+ }
+ emitter.write_handler = yaml_string_write_handler
+ emitter.output_buffer = output_buffer
+}
+
+// Set a file output.
+func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
+ if emitter.write_handler != nil {
+ panic("must set the output target only once")
+ }
+ emitter.write_handler = yaml_file_write_handler
+ emitter.output_file = file
+}
+
+// Set the output encoding.
+func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
+ if emitter.encoding != yaml_ANY_ENCODING {
+ panic("must set the output encoding only once")
+ }
+ emitter.encoding = encoding
+}
+
+// Set the canonical output style.
+func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
+ emitter.canonical = canonical
+}
+
+//// Set the indentation increment.
+func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
+ if indent < 2 || indent > 9 {
+ indent = 2
+ }
+ emitter.best_indent = indent
+}
+
+// Set the preferred line width.
+func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
+ if width < 0 {
+ width = -1
+ }
+ emitter.best_width = width
+}
+
+// Set if unescaped non-ASCII characters are allowed.
+func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
+ emitter.unicode = unicode
+}
+
+// Set the preferred line break character.
+func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
+ emitter.line_break = line_break
+}
+
+///*
+// * Destroy a token object.
+// */
+//
+//YAML_DECLARE(void)
+//yaml_token_delete(yaml_token_t *token)
+//{
+// assert(token); // Non-NULL token object expected.
+//
+// switch (token.type)
+// {
+// case YAML_TAG_DIRECTIVE_TOKEN:
+// yaml_free(token.data.tag_directive.handle);
+// yaml_free(token.data.tag_directive.prefix);
+// break;
+//
+// case YAML_ALIAS_TOKEN:
+// yaml_free(token.data.alias.value);
+// break;
+//
+// case YAML_ANCHOR_TOKEN:
+// yaml_free(token.data.anchor.value);
+// break;
+//
+// case YAML_TAG_TOKEN:
+// yaml_free(token.data.tag.handle);
+// yaml_free(token.data.tag.suffix);
+// break;
+//
+// case YAML_SCALAR_TOKEN:
+// yaml_free(token.data.scalar.value);
+// break;
+//
+// default:
+// break;
+// }
+//
+// memset(token, 0, sizeof(yaml_token_t));
+//}
+//
+///*
+// * Check if a string is a valid UTF-8 sequence.
+// *
+// * Check 'reader.c' for more details on UTF-8 encoding.
+// */
+//
+//static int
+//yaml_check_utf8(yaml_char_t *start, size_t length)
+//{
+// yaml_char_t *end = start+length;
+// yaml_char_t *pointer = start;
+//
+// while (pointer < end) {
+// unsigned char octet;
+// unsigned int width;
+// unsigned int value;
+// size_t k;
+//
+// octet = pointer[0];
+// width = (octet & 0x80) == 0x00 ? 1 :
+// (octet & 0xE0) == 0xC0 ? 2 :
+// (octet & 0xF0) == 0xE0 ? 3 :
+// (octet & 0xF8) == 0xF0 ? 4 : 0;
+// value = (octet & 0x80) == 0x00 ? octet & 0x7F :
+// (octet & 0xE0) == 0xC0 ? octet & 0x1F :
+// (octet & 0xF0) == 0xE0 ? octet & 0x0F :
+// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
+// if (!width) return 0;
+// if (pointer+width > end) return 0;
+// for (k = 1; k < width; k ++) {
+// octet = pointer[k];
+// if ((octet & 0xC0) != 0x80) return 0;
+// value = (value << 6) + (octet & 0x3F);
+// }
+// if (!((width == 1) ||
+// (width == 2 && value >= 0x80) ||
+// (width == 3 && value >= 0x800) ||
+// (width == 4 && value >= 0x10000))) return 0;
+//
+// pointer += width;
+// }
+//
+// return 1;
+//}
+//
+
+// Create STREAM-START.
+func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_STREAM_START_EVENT,
+ encoding: encoding,
+ }
+ return true
+}
+
+// Create STREAM-END.
+func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_STREAM_END_EVENT,
+ }
+ return true
+}
+
+// Create DOCUMENT-START.
+func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
+ tag_directives []yaml_tag_directive_t, implicit bool) bool {
+ *event = yaml_event_t{
+ typ: yaml_DOCUMENT_START_EVENT,
+ version_directive: version_directive,
+ tag_directives: tag_directives,
+ implicit: implicit,
+ }
+ return true
+}
+
+// Create DOCUMENT-END.
+func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
+ *event = yaml_event_t{
+ typ: yaml_DOCUMENT_END_EVENT,
+ implicit: implicit,
+ }
+ return true
+}
+
+///*
+// * Create ALIAS.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
+//{
+// mark yaml_mark_t = { 0, 0, 0 }
+// anchor_copy *yaml_char_t = NULL
+//
+// assert(event) // Non-NULL event object is expected.
+// assert(anchor) // Non-NULL anchor is expected.
+//
+// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
+//
+// anchor_copy = yaml_strdup(anchor)
+// if (!anchor_copy)
+// return 0
+//
+// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
+//
+// return 1
+//}
+
+// Create SCALAR.
+func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_SCALAR_EVENT,
+ anchor: anchor,
+ tag: tag,
+ value: value,
+ implicit: plain_implicit,
+ quoted_implicit: quoted_implicit,
+ style: yaml_style_t(style),
+ }
+ return true
+}
+
+// Create SEQUENCE-START.
+func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_SEQUENCE_START_EVENT,
+ anchor: anchor,
+ tag: tag,
+ implicit: implicit,
+ style: yaml_style_t(style),
+ }
+ return true
+}
+
+// Create SEQUENCE-END.
+func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_SEQUENCE_END_EVENT,
+ }
+ return true
+}
+
+// Create MAPPING-START.
+func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_MAPPING_START_EVENT,
+ anchor: anchor,
+ tag: tag,
+ implicit: implicit,
+ style: yaml_style_t(style),
+ }
+ return true
+}
+
+// Create MAPPING-END.
+func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_MAPPING_END_EVENT,
+ }
+ return true
+}
+
+// Destroy an event object.
+func yaml_event_delete(event *yaml_event_t) {
+ *event = yaml_event_t{}
+}
+
+///*
+// * Create a document object.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_initialize(document *yaml_document_t,
+// version_directive *yaml_version_directive_t,
+// tag_directives_start *yaml_tag_directive_t,
+// tag_directives_end *yaml_tag_directive_t,
+// start_implicit int, end_implicit int)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+// struct {
+// start *yaml_node_t
+// end *yaml_node_t
+// top *yaml_node_t
+// } nodes = { NULL, NULL, NULL }
+// version_directive_copy *yaml_version_directive_t = NULL
+// struct {
+// start *yaml_tag_directive_t
+// end *yaml_tag_directive_t
+// top *yaml_tag_directive_t
+// } tag_directives_copy = { NULL, NULL, NULL }
+// value yaml_tag_directive_t = { NULL, NULL }
+// mark yaml_mark_t = { 0, 0, 0 }
+//
+// assert(document) // Non-NULL document object is expected.
+// assert((tag_directives_start && tag_directives_end) ||
+// (tag_directives_start == tag_directives_end))
+// // Valid tag directives are expected.
+//
+// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
+//
+// if (version_directive) {
+// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
+// if (!version_directive_copy) goto error
+// version_directive_copy.major = version_directive.major
+// version_directive_copy.minor = version_directive.minor
+// }
+//
+// if (tag_directives_start != tag_directives_end) {
+// tag_directive *yaml_tag_directive_t
+// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
+// goto error
+// for (tag_directive = tag_directives_start
+// tag_directive != tag_directives_end; tag_directive ++) {
+// assert(tag_directive.handle)
+// assert(tag_directive.prefix)
+// if (!yaml_check_utf8(tag_directive.handle,
+// strlen((char *)tag_directive.handle)))
+// goto error
+// if (!yaml_check_utf8(tag_directive.prefix,
+// strlen((char *)tag_directive.prefix)))
+// goto error
+// value.handle = yaml_strdup(tag_directive.handle)
+// value.prefix = yaml_strdup(tag_directive.prefix)
+// if (!value.handle || !value.prefix) goto error
+// if (!PUSH(&context, tag_directives_copy, value))
+// goto error
+// value.handle = NULL
+// value.prefix = NULL
+// }
+// }
+//
+// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
+// tag_directives_copy.start, tag_directives_copy.top,
+// start_implicit, end_implicit, mark, mark)
+//
+// return 1
+//
+//error:
+// STACK_DEL(&context, nodes)
+// yaml_free(version_directive_copy)
+// while (!STACK_EMPTY(&context, tag_directives_copy)) {
+// value yaml_tag_directive_t = POP(&context, tag_directives_copy)
+// yaml_free(value.handle)
+// yaml_free(value.prefix)
+// }
+// STACK_DEL(&context, tag_directives_copy)
+// yaml_free(value.handle)
+// yaml_free(value.prefix)
+//
+// return 0
+//}
+//
+///*
+// * Destroy a document object.
+// */
+//
+//YAML_DECLARE(void)
+//yaml_document_delete(document *yaml_document_t)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+// tag_directive *yaml_tag_directive_t
+//
+// context.error = YAML_NO_ERROR // Eliminate a compliler warning.
+//
+// assert(document) // Non-NULL document object is expected.
+//
+// while (!STACK_EMPTY(&context, document.nodes)) {
+// node yaml_node_t = POP(&context, document.nodes)
+// yaml_free(node.tag)
+// switch (node.type) {
+// case YAML_SCALAR_NODE:
+// yaml_free(node.data.scalar.value)
+// break
+// case YAML_SEQUENCE_NODE:
+// STACK_DEL(&context, node.data.sequence.items)
+// break
+// case YAML_MAPPING_NODE:
+// STACK_DEL(&context, node.data.mapping.pairs)
+// break
+// default:
+// assert(0) // Should not happen.
+// }
+// }
+// STACK_DEL(&context, document.nodes)
+//
+// yaml_free(document.version_directive)
+// for (tag_directive = document.tag_directives.start
+// tag_directive != document.tag_directives.end
+// tag_directive++) {
+// yaml_free(tag_directive.handle)
+// yaml_free(tag_directive.prefix)
+// }
+// yaml_free(document.tag_directives.start)
+//
+// memset(document, 0, sizeof(yaml_document_t))
+//}
+//
+///**
+// * Get a document node.
+// */
+//
+//YAML_DECLARE(yaml_node_t *)
+//yaml_document_get_node(document *yaml_document_t, index int)
+//{
+// assert(document) // Non-NULL document object is expected.
+//
+// if (index > 0 && document.nodes.start + index <= document.nodes.top) {
+// return document.nodes.start + index - 1
+// }
+// return NULL
+//}
+//
+///**
+// * Get the root object.
+// */
+//
+//YAML_DECLARE(yaml_node_t *)
+//yaml_document_get_root_node(document *yaml_document_t)
+//{
+// assert(document) // Non-NULL document object is expected.
+//
+// if (document.nodes.top != document.nodes.start) {
+// return document.nodes.start
+// }
+// return NULL
+//}
+//
+///*
+// * Add a scalar node to a document.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_add_scalar(document *yaml_document_t,
+// tag *yaml_char_t, value *yaml_char_t, length int,
+// style yaml_scalar_style_t)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+// mark yaml_mark_t = { 0, 0, 0 }
+// tag_copy *yaml_char_t = NULL
+// value_copy *yaml_char_t = NULL
+// node yaml_node_t
+//
+// assert(document) // Non-NULL document object is expected.
+// assert(value) // Non-NULL value is expected.
+//
+// if (!tag) {
+// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
+// }
+//
+// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
+// tag_copy = yaml_strdup(tag)
+// if (!tag_copy) goto error
+//
+// if (length < 0) {
+// length = strlen((char *)value)
+// }
+//
+// if (!yaml_check_utf8(value, length)) goto error
+// value_copy = yaml_malloc(length+1)
+// if (!value_copy) goto error
+// memcpy(value_copy, value, length)
+// value_copy[length] = '\0'
+//
+// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
+// if (!PUSH(&context, document.nodes, node)) goto error
+//
+// return document.nodes.top - document.nodes.start
+//
+//error:
+// yaml_free(tag_copy)
+// yaml_free(value_copy)
+//
+// return 0
+//}
+//
+///*
+// * Add a sequence node to a document.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_add_sequence(document *yaml_document_t,
+// tag *yaml_char_t, style yaml_sequence_style_t)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+// mark yaml_mark_t = { 0, 0, 0 }
+// tag_copy *yaml_char_t = NULL
+// struct {
+// start *yaml_node_item_t
+// end *yaml_node_item_t
+// top *yaml_node_item_t
+// } items = { NULL, NULL, NULL }
+// node yaml_node_t
+//
+// assert(document) // Non-NULL document object is expected.
+//
+// if (!tag) {
+// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
+// }
+//
+// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
+// tag_copy = yaml_strdup(tag)
+// if (!tag_copy) goto error
+//
+// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
+//
+// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
+// style, mark, mark)
+// if (!PUSH(&context, document.nodes, node)) goto error
+//
+// return document.nodes.top - document.nodes.start
+//
+//error:
+// STACK_DEL(&context, items)
+// yaml_free(tag_copy)
+//
+// return 0
+//}
+//
+///*
+// * Add a mapping node to a document.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_add_mapping(document *yaml_document_t,
+// tag *yaml_char_t, style yaml_mapping_style_t)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+// mark yaml_mark_t = { 0, 0, 0 }
+// tag_copy *yaml_char_t = NULL
+// struct {
+// start *yaml_node_pair_t
+// end *yaml_node_pair_t
+// top *yaml_node_pair_t
+// } pairs = { NULL, NULL, NULL }
+// node yaml_node_t
+//
+// assert(document) // Non-NULL document object is expected.
+//
+// if (!tag) {
+// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
+// }
+//
+// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
+// tag_copy = yaml_strdup(tag)
+// if (!tag_copy) goto error
+//
+// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
+//
+// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
+// style, mark, mark)
+// if (!PUSH(&context, document.nodes, node)) goto error
+//
+// return document.nodes.top - document.nodes.start
+//
+//error:
+// STACK_DEL(&context, pairs)
+// yaml_free(tag_copy)
+//
+// return 0
+//}
+//
+///*
+// * Append an item to a sequence node.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_append_sequence_item(document *yaml_document_t,
+// sequence int, item int)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+//
+// assert(document) // Non-NULL document is required.
+// assert(sequence > 0
+// && document.nodes.start + sequence <= document.nodes.top)
+// // Valid sequence id is required.
+// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
+// // A sequence node is required.
+// assert(item > 0 && document.nodes.start + item <= document.nodes.top)
+// // Valid item id is required.
+//
+// if (!PUSH(&context,
+// document.nodes.start[sequence-1].data.sequence.items, item))
+// return 0
+//
+// return 1
+//}
+//
+///*
+// * Append a pair of a key and a value to a mapping node.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_append_mapping_pair(document *yaml_document_t,
+// mapping int, key int, value int)
+//{
+// struct {
+// error yaml_error_type_t
+// } context
+//
+// pair yaml_node_pair_t
+//
+// assert(document) // Non-NULL document is required.
+// assert(mapping > 0
+// && document.nodes.start + mapping <= document.nodes.top)
+// // Valid mapping id is required.
+// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
+// // A mapping node is required.
+// assert(key > 0 && document.nodes.start + key <= document.nodes.top)
+// // Valid key id is required.
+// assert(value > 0 && document.nodes.start + value <= document.nodes.top)
+// // Valid value id is required.
+//
+// pair.key = key
+// pair.value = value
+//
+// if (!PUSH(&context,
+// document.nodes.start[mapping-1].data.mapping.pairs, pair))
+// return 0
+//
+// return 1
+//}
+//
+//
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/decode.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/decode.go
new file mode 100644
index 00000000..a098626d
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/decode.go
@@ -0,0 +1,566 @@
+package yaml
+
+import (
+ "encoding/base64"
+ "fmt"
+ "reflect"
+ "strconv"
+ "time"
+)
+
+const (
+ documentNode = 1 << iota
+ mappingNode
+ sequenceNode
+ scalarNode
+ aliasNode
+)
+
+type node struct {
+ kind int
+ line, column int
+ tag string
+ value string
+ implicit bool
+ children []*node
+ anchors map[string]*node
+}
+
+// ----------------------------------------------------------------------------
+// Parser, produces a node tree out of a libyaml event stream.
+
+type parser struct {
+ parser yaml_parser_t
+ event yaml_event_t
+ doc *node
+}
+
+func newParser(b []byte) *parser {
+ p := parser{}
+ if !yaml_parser_initialize(&p.parser) {
+ panic("Failed to initialize YAML emitter")
+ }
+
+ if len(b) == 0 {
+ b = []byte{'\n'}
+ }
+
+ yaml_parser_set_input_string(&p.parser, b)
+
+ p.skip()
+ if p.event.typ != yaml_STREAM_START_EVENT {
+ panic("Expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
+ }
+ p.skip()
+ return &p
+}
+
+func (p *parser) destroy() {
+ if p.event.typ != yaml_NO_EVENT {
+ yaml_event_delete(&p.event)
+ }
+ yaml_parser_delete(&p.parser)
+}
+
+func (p *parser) skip() {
+ if p.event.typ != yaml_NO_EVENT {
+ if p.event.typ == yaml_STREAM_END_EVENT {
+ fail("Attempted to go past the end of stream. Corrupted value?")
+ }
+ yaml_event_delete(&p.event)
+ }
+ if !yaml_parser_parse(&p.parser, &p.event) {
+ p.fail()
+ }
+}
+
+func (p *parser) fail() {
+ var where string
+ var line int
+ if p.parser.problem_mark.line != 0 {
+ line = p.parser.problem_mark.line
+ } else if p.parser.context_mark.line != 0 {
+ line = p.parser.context_mark.line
+ }
+ if line != 0 {
+ where = "line " + strconv.Itoa(line) + ": "
+ }
+ var msg string
+ if len(p.parser.problem) > 0 {
+ msg = p.parser.problem
+ } else {
+ msg = "Unknown problem parsing YAML content"
+ }
+ fail(where + msg)
+}
+
+func (p *parser) anchor(n *node, anchor []byte) {
+ if anchor != nil {
+ p.doc.anchors[string(anchor)] = n
+ }
+}
+
+func (p *parser) parse() *node {
+ switch p.event.typ {
+ case yaml_SCALAR_EVENT:
+ return p.scalar()
+ case yaml_ALIAS_EVENT:
+ return p.alias()
+ case yaml_MAPPING_START_EVENT:
+ return p.mapping()
+ case yaml_SEQUENCE_START_EVENT:
+ return p.sequence()
+ case yaml_DOCUMENT_START_EVENT:
+ return p.document()
+ case yaml_STREAM_END_EVENT:
+ // Happens when attempting to decode an empty buffer.
+ return nil
+ default:
+ panic("Attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
+ }
+ panic("unreachable")
+}
+
+func (p *parser) node(kind int) *node {
+ return &node{
+ kind: kind,
+ line: p.event.start_mark.line,
+ column: p.event.start_mark.column,
+ }
+}
+
+func (p *parser) document() *node {
+ n := p.node(documentNode)
+ n.anchors = make(map[string]*node)
+ p.doc = n
+ p.skip()
+ n.children = append(n.children, p.parse())
+ if p.event.typ != yaml_DOCUMENT_END_EVENT {
+ panic("Expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
+ }
+ p.skip()
+ return n
+}
+
+func (p *parser) alias() *node {
+ n := p.node(aliasNode)
+ n.value = string(p.event.anchor)
+ p.skip()
+ return n
+}
+
+func (p *parser) scalar() *node {
+ n := p.node(scalarNode)
+ n.value = string(p.event.value)
+ n.tag = string(p.event.tag)
+ n.implicit = p.event.implicit
+ p.anchor(n, p.event.anchor)
+ p.skip()
+ return n
+}
+
+func (p *parser) sequence() *node {
+ n := p.node(sequenceNode)
+ p.anchor(n, p.event.anchor)
+ p.skip()
+ for p.event.typ != yaml_SEQUENCE_END_EVENT {
+ n.children = append(n.children, p.parse())
+ }
+ p.skip()
+ return n
+}
+
+func (p *parser) mapping() *node {
+ n := p.node(mappingNode)
+ p.anchor(n, p.event.anchor)
+ p.skip()
+ for p.event.typ != yaml_MAPPING_END_EVENT {
+ n.children = append(n.children, p.parse(), p.parse())
+ }
+ p.skip()
+ return n
+}
+
+// ----------------------------------------------------------------------------
+// Decoder, unmarshals a node into a provided value.
+
+type decoder struct {
+ doc *node
+ aliases map[string]bool
+}
+
+func newDecoder() *decoder {
+ d := &decoder{}
+ d.aliases = make(map[string]bool)
+ return d
+}
+
+// d.setter deals with setters and pointer dereferencing and initialization.
+//
+// It's a slightly convoluted case to handle properly:
+//
+// - nil pointers should be initialized, unless being set to nil
+// - we don't know at this point yet what's the value to SetYAML() with.
+// - we can't separate pointer deref/init and setter checking, because
+// a setter may be found while going down a pointer chain.
+//
+// Thus, here is how it takes care of it:
+//
+// - out is provided as a pointer, so that it can be replaced.
+// - when looking at a non-setter ptr, *out=ptr.Elem(), unless tag=!!null
+// - when a setter is found, *out=interface{}, and a set() function is
+// returned to call SetYAML() with the value of *out once it's defined.
+//
+func (d *decoder) setter(tag string, out *reflect.Value, good *bool) (set func()) {
+ if (*out).Kind() != reflect.Ptr && (*out).CanAddr() {
+ setter, _ := (*out).Addr().Interface().(Setter)
+ if setter != nil {
+ var arg interface{}
+ *out = reflect.ValueOf(&arg).Elem()
+ return func() {
+ *good = setter.SetYAML(shortTag(tag), arg)
+ }
+ }
+ }
+ again := true
+ for again {
+ again = false
+ setter, _ := (*out).Interface().(Setter)
+ if tag != yaml_NULL_TAG || setter != nil {
+ if pv := (*out); pv.Kind() == reflect.Ptr {
+ if pv.IsNil() {
+ *out = reflect.New(pv.Type().Elem()).Elem()
+ pv.Set((*out).Addr())
+ } else {
+ *out = pv.Elem()
+ }
+ setter, _ = pv.Interface().(Setter)
+ again = true
+ }
+ }
+ if setter != nil {
+ var arg interface{}
+ *out = reflect.ValueOf(&arg).Elem()
+ return func() {
+ *good = setter.SetYAML(shortTag(tag), arg)
+ }
+ }
+ }
+ return nil
+}
+
+func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
+ switch n.kind {
+ case documentNode:
+ good = d.document(n, out)
+ case scalarNode:
+ good = d.scalar(n, out)
+ case aliasNode:
+ good = d.alias(n, out)
+ case mappingNode:
+ good = d.mapping(n, out)
+ case sequenceNode:
+ good = d.sequence(n, out)
+ default:
+ panic("Internal error: unknown node kind: " + strconv.Itoa(n.kind))
+ }
+ return
+}
+
+func (d *decoder) document(n *node, out reflect.Value) (good bool) {
+ if len(n.children) == 1 {
+ d.doc = n
+ d.unmarshal(n.children[0], out)
+ return true
+ }
+ return false
+}
+
+func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
+ an, ok := d.doc.anchors[n.value]
+ if !ok {
+ fail("Unknown anchor '" + n.value + "' referenced")
+ }
+ if d.aliases[n.value] {
+ fail("Anchor '" + n.value + "' value contains itself")
+ }
+ d.aliases[n.value] = true
+ good = d.unmarshal(an, out)
+ delete(d.aliases, n.value)
+ return good
+}
+
+var zeroValue reflect.Value
+
+func resetMap(out reflect.Value) {
+ for _, k := range out.MapKeys() {
+ out.SetMapIndex(k, zeroValue)
+ }
+}
+
+// durationType caches time.Duration's reflect.Type so string scalars can be
+// parsed with time.ParseDuration when decoding into that type.
+var durationType = reflect.TypeOf(time.Duration(0))
+
+// scalar decodes a scalar node into out. The node's tag is resolved to a
+// concrete Go value (string, int, float64, bool, nil, ...) and then converted
+// to the destination kind; good reports whether the conversion succeeded.
+func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
+	var tag string
+	var resolved interface{}
+	if n.tag == "" && !n.implicit {
+		// Explicitly quoted/non-plain scalar with no tag: always a string.
+		tag = yaml_STR_TAG
+		resolved = n.value
+	} else {
+		tag, resolved = resolve(n.tag, n.value)
+		if tag == yaml_BINARY_TAG {
+			data, err := base64.StdEncoding.DecodeString(resolved.(string))
+			if err != nil {
+				fail("!!binary value contains invalid base64 data")
+			}
+			resolved = string(data)
+		}
+	}
+	if set := d.setter(tag, &out, &good); set != nil {
+		defer set()
+	}
+	if resolved == nil {
+		// A null scalar zeroes the destination; a non-addressable map
+		// cannot be replaced wholesale, so it is emptied in place.
+		if out.Kind() == reflect.Map && !out.CanAddr() {
+			resetMap(out)
+		} else {
+			out.Set(reflect.Zero(out.Type()))
+		}
+		good = true
+		return
+	}
+	switch out.Kind() {
+	case reflect.String:
+		if tag == yaml_BINARY_TAG {
+			// Binary data was already base64-decoded above; keep the bytes.
+			out.SetString(resolved.(string))
+			good = true
+		} else if resolved != nil {
+			// Use the raw node text, not the resolved value.
+			out.SetString(n.value)
+			good = true
+		}
+	case reflect.Interface:
+		if resolved == nil {
+			out.Set(reflect.Zero(out.Type()))
+		} else {
+			out.Set(reflect.ValueOf(resolved))
+		}
+		good = true
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		switch resolved := resolved.(type) {
+		case int:
+			if !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case int64:
+			if !out.OverflowInt(resolved) {
+				out.SetInt(resolved)
+				good = true
+			}
+		case float64:
+			if resolved < 1<<63-1 && !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case string:
+			// Strings are only accepted for time.Duration destinations.
+			if out.Type() == durationType {
+				d, err := time.ParseDuration(resolved)
+				if err == nil {
+					out.SetInt(int64(d))
+					good = true
+				}
+			}
+		}
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+		switch resolved := resolved.(type) {
+		case int:
+			if resolved >= 0 {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case int64:
+			if resolved >= 0 {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case float64:
+			if resolved < 1<<64-1 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		}
+	case reflect.Bool:
+		switch resolved := resolved.(type) {
+		case bool:
+			out.SetBool(resolved)
+			good = true
+		}
+	case reflect.Float32, reflect.Float64:
+		switch resolved := resolved.(type) {
+		case int:
+			out.SetFloat(float64(resolved))
+			good = true
+		case int64:
+			out.SetFloat(float64(resolved))
+			good = true
+		case float64:
+			out.SetFloat(resolved)
+			good = true
+		}
+	case reflect.Ptr:
+		// Only pointers whose element type matches the resolved value
+		// exactly are filled; anything else is rejected.
+		if out.Type().Elem() == reflect.TypeOf(resolved) {
+			elem := reflect.New(out.Type().Elem())
+			elem.Elem().Set(reflect.ValueOf(resolved))
+			out.Set(elem)
+			good = true
+		}
+	}
+	return good
+}
+
+// settableValueOf returns an addressable, settable reflect.Value holding a
+// copy of i (reflect.ValueOf alone yields a non-settable value).
+func settableValueOf(i interface{}) reflect.Value {
+	v := reflect.ValueOf(i)
+	sv := reflect.New(v.Type()).Elem()
+	sv.Set(v)
+	return sv
+}
+
+// sequence decodes a sequence node into a slice (or, for interface
+// destinations, a freshly built []interface{}). Children that fail to
+// decode are silently skipped rather than aborting the whole sequence.
+func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
+	if set := d.setter(yaml_SEQ_TAG, &out, &good); set != nil {
+		defer set()
+	}
+	var iface reflect.Value
+	if out.Kind() == reflect.Interface {
+		// No type hints. Will have to use a generic sequence.
+		iface = out
+		out = settableValueOf(make([]interface{}, 0))
+	}
+
+	if out.Kind() != reflect.Slice {
+		return false
+	}
+	et := out.Type().Elem()
+
+	l := len(n.children)
+	for i := 0; i < l; i++ {
+		e := reflect.New(et).Elem()
+		if ok := d.unmarshal(n.children[i], e); ok {
+			out.Set(reflect.Append(out, e))
+		}
+	}
+	if iface.IsValid() {
+		// Store the built slice back into the interface destination.
+		iface.Set(out)
+	}
+	return true
+}
+
+// mapping decodes a mapping node into a struct, map, or interface value.
+// n.children interleaves keys and values: children[i] is a key and
+// children[i+1] its value. Merge keys ("<<") splice another mapping in.
+func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
+	if set := d.setter(yaml_MAP_TAG, &out, &good); set != nil {
+		defer set()
+	}
+	if out.Kind() == reflect.Struct {
+		return d.mappingStruct(n, out)
+	}
+
+	if out.Kind() == reflect.Interface {
+		// No type hints. Will have to use a generic map.
+		iface := out
+		out = settableValueOf(make(map[interface{}]interface{}))
+		iface.Set(out)
+	}
+
+	if out.Kind() != reflect.Map {
+		return false
+	}
+	outt := out.Type()
+	kt := outt.Key()
+	et := outt.Elem()
+
+	if out.IsNil() {
+		out.Set(reflect.MakeMap(outt))
+	}
+	l := len(n.children)
+	for i := 0; i < l; i += 2 {
+		if isMerge(n.children[i]) {
+			d.merge(n.children[i+1], out)
+			continue
+		}
+		k := reflect.New(kt).Elem()
+		if d.unmarshal(n.children[i], k) {
+			kkind := k.Kind()
+			if kkind == reflect.Interface {
+				kkind = k.Elem().Kind()
+			}
+			if kkind == reflect.Map || kkind == reflect.Slice {
+				// Maps and slices are not comparable, so they cannot
+				// be used as map keys.
+				fail(fmt.Sprintf("invalid map key: %#v", k.Interface()))
+			}
+			e := reflect.New(et).Elem()
+			if d.unmarshal(n.children[i+1], e) {
+				out.SetMapIndex(k, e)
+			}
+		}
+	}
+	return true
+}
+
+// mappingStruct decodes a mapping node into a struct, matching YAML keys
+// against the struct's field map (built by getStructInfo). Unknown keys are
+// ignored; merge keys are expanded into the same struct.
+func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
+	sinfo, err := getStructInfo(out.Type())
+	if err != nil {
+		panic(err)
+	}
+	// Reusable settable string that receives each key in turn.
+	name := settableValueOf("")
+	l := len(n.children)
+	for i := 0; i < l; i += 2 {
+		ni := n.children[i]
+		if isMerge(ni) {
+			d.merge(n.children[i+1], out)
+			continue
+		}
+		if !d.unmarshal(ni, name) {
+			continue
+		}
+		if info, ok := sinfo.FieldsMap[name.String()]; ok {
+			var field reflect.Value
+			if info.Inline == nil {
+				field = out.Field(info.Num)
+			} else {
+				// Inlined fields live inside an embedded struct.
+				field = out.FieldByIndex(info.Inline)
+			}
+			d.unmarshal(n.children[i+1], field)
+		}
+	}
+	return true
+}
+
+// merge implements the YAML "<<" merge key: n must be a mapping, an alias to
+// a mapping, or a sequence of (aliases to) mappings, whose entries are
+// decoded into out.
+func (d *decoder) merge(n *node, out reflect.Value) {
+	const wantMap = "map merge requires map or sequence of maps as the value"
+	switch n.kind {
+	case mappingNode:
+		d.unmarshal(n, out)
+	case aliasNode:
+		an, ok := d.doc.anchors[n.value]
+		if ok && an.kind != mappingNode {
+			fail(wantMap)
+		}
+		d.unmarshal(n, out)
+	case sequenceNode:
+		// Step backwards as earlier nodes take precedence.
+		for i := len(n.children) - 1; i >= 0; i-- {
+			ni := n.children[i]
+			if ni.kind == aliasNode {
+				an, ok := d.doc.anchors[ni.value]
+				if ok && an.kind != mappingNode {
+					fail(wantMap)
+				}
+			} else if ni.kind != mappingNode {
+				fail(wantMap)
+			}
+			d.unmarshal(ni, out)
+		}
+	default:
+		fail(wantMap)
+	}
+}
+
+// isMerge reports whether n is the YAML merge key ("<<"), either written as
+// a plain (implicit) scalar or explicitly tagged with !!merge.
+func isMerge(n *node) bool {
+	// Idiom fix: compare the bool directly instead of "== true".
+	return n.kind == scalarNode && n.value == "<<" && (n.implicit || n.tag == yaml_MERGE_TAG)
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/emitterc.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/emitterc.go
new file mode 100644
index 00000000..9b3dc4a4
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/emitterc.go
@@ -0,0 +1,1685 @@
+package yaml
+
+import (
+ "bytes"
+)
+
+// Flush the buffer if needed. The 5-byte headroom matches the reserve used
+// by put/write: enough for the widest UTF-8 sequence plus a line break.
+func flush(emitter *yaml_emitter_t) bool {
+	if emitter.buffer_pos+5 >= len(emitter.buffer) {
+		return yaml_emitter_flush(emitter)
+	}
+	return true
+}
+
+// Put a single byte into the output buffer, flushing first when the 5-byte
+// headroom would be exceeded. Advances the emitter's column.
+func put(emitter *yaml_emitter_t, value byte) bool {
+	if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
+		return false
+	}
+	emitter.buffer[emitter.buffer_pos] = value
+	emitter.buffer_pos++
+	emitter.column++
+	return true
+}
+
+// Put a line break into the output buffer using the emitter's configured
+// break style (CR, LF, or CRLF), then reset the column and bump the line.
+func put_break(emitter *yaml_emitter_t) bool {
+	if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
+		return false
+	}
+	switch emitter.line_break {
+	case yaml_CR_BREAK:
+		emitter.buffer[emitter.buffer_pos] = '\r'
+		emitter.buffer_pos += 1
+	case yaml_LN_BREAK:
+		emitter.buffer[emitter.buffer_pos] = '\n'
+		emitter.buffer_pos += 1
+	case yaml_CRLN_BREAK:
+		// CRLF writes two bytes.
+		emitter.buffer[emitter.buffer_pos+0] = '\r'
+		emitter.buffer[emitter.buffer_pos+1] = '\n'
+		emitter.buffer_pos += 2
+	default:
+		panic("unknown line break setting")
+	}
+	emitter.column = 0
+	emitter.line++
+	return true
+}
+
+// Copy one UTF-8 character (1-4 bytes) from s starting at *i into the
+// buffer, advancing *i past it. Counts as a single column regardless of
+// byte width.
+func write(emitter *yaml_emitter_t, s []byte, i *int) bool {
+	if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
+		return false
+	}
+	p := emitter.buffer_pos
+	w := width(s[*i])
+	// Fallthrough copies the trailing continuation bytes first, then the
+	// lead byte, so all w bytes land in the buffer.
+	switch w {
+	case 4:
+		emitter.buffer[p+3] = s[*i+3]
+		fallthrough
+	case 3:
+		emitter.buffer[p+2] = s[*i+2]
+		fallthrough
+	case 2:
+		emitter.buffer[p+1] = s[*i+1]
+		fallthrough
+	case 1:
+		emitter.buffer[p+0] = s[*i+0]
+	default:
+		panic("unknown character width")
+	}
+	emitter.column++
+	emitter.buffer_pos += w
+	*i += w
+	return true
+}
+
+// Write a whole string into the buffer, character by character.
+func write_all(emitter *yaml_emitter_t, s []byte) bool {
+	for i := 0; i < len(s); {
+		if !write(emitter, s, &i) {
+			return false
+		}
+	}
+	return true
+}
+
+// Copy a line break character from s into the buffer. '\n' is translated to
+// the emitter's configured break style; any other break character is copied
+// verbatim but still resets the column and advances the line counter.
+func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool {
+	if s[*i] == '\n' {
+		if !put_break(emitter) {
+			return false
+		}
+		*i++
+	} else {
+		if !write(emitter, s, i) {
+			return false
+		}
+		emitter.column = 0
+		emitter.line++
+	}
+	return true
+}
+
+// Set an emitter error and return false so callers can propagate the
+// failure in a single expression.
+func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool {
+	emitter.error = yaml_EMITTER_ERROR
+	emitter.problem = problem
+	return false
+}
+
+// Emit an event. The event is queued; queued events are then processed
+// through the state machine for as long as enough lookahead is available
+// (see yaml_emitter_need_more_events).
+func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	emitter.events = append(emitter.events, *event)
+	for !yaml_emitter_need_more_events(emitter) {
+		event := &emitter.events[emitter.events_head]
+		if !yaml_emitter_analyze_event(emitter, event) {
+			return false
+		}
+		if !yaml_emitter_state_machine(emitter, event) {
+			return false
+		}
+		yaml_event_delete(event)
+		emitter.events_head++
+	}
+	return true
+}
+
+// Check if we need to accumulate more events before emitting.
+//
+// We accumulate extra
+// - 1 event for DOCUMENT-START
+// - 2 events for SEQUENCE-START
+// - 3 events for MAPPING-START
+//
+// Returns true when the emitter should wait for more events.
+func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool {
+	if emitter.events_head == len(emitter.events) {
+		return true
+	}
+	var accumulate int
+	// Go switch cases do not fall through; the redundant break
+	// statements from the C original have been dropped.
+	switch emitter.events[emitter.events_head].typ {
+	case yaml_DOCUMENT_START_EVENT:
+		accumulate = 1
+	case yaml_SEQUENCE_START_EVENT:
+		accumulate = 2
+	case yaml_MAPPING_START_EVENT:
+		accumulate = 3
+	default:
+		return false
+	}
+	if len(emitter.events)-emitter.events_head > accumulate {
+		return false
+	}
+	// Scan ahead: once nesting returns to level zero the queued node is
+	// complete and can be emitted without further lookahead.
+	var level int
+	for i := emitter.events_head; i < len(emitter.events); i++ {
+		switch emitter.events[i].typ {
+		case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT:
+			level++
+		case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT:
+			level--
+		}
+		if level == 0 {
+			return false
+		}
+	}
+	return true
+}
+
+// Append a %TAG directive to the emitter's directives stack, rejecting
+// duplicate handles unless allow_duplicates is set (used for the default
+// directives, which a document may legitimately restate).
+func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool {
+	for i := 0; i < len(emitter.tag_directives); i++ {
+		if bytes.Equal(value.handle, emitter.tag_directives[i].handle) {
+			if allow_duplicates {
+				return true
+			}
+			return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive")
+		}
+	}
+
+	// [Go] Do we actually need to copy this given garbage collection
+	// and the lack of deallocating destructors?
+	tag_copy := yaml_tag_directive_t{
+		handle: make([]byte, len(value.handle)),
+		prefix: make([]byte, len(value.prefix)),
+	}
+	copy(tag_copy.handle, value.handle)
+	copy(tag_copy.prefix, value.prefix)
+	emitter.tag_directives = append(emitter.tag_directives, tag_copy)
+	return true
+}
+
+// Increase the indentation level, pushing the previous indent onto the
+// indents stack. A negative current indent means top level: flow context
+// starts at best_indent, block context at column 0. With indentless set
+// the indent is kept as-is (used for sequences nested in mappings).
+func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool {
+	emitter.indents = append(emitter.indents, emitter.indent)
+	if emitter.indent < 0 {
+		if flow {
+			emitter.indent = emitter.best_indent
+		} else {
+			emitter.indent = 0
+		}
+	} else if !indentless {
+		emitter.indent += emitter.best_indent
+	}
+	return true
+}
+
+// State dispatcher: routes the event to the handler for the emitter's
+// current state. An unknown state falls out of the switch (via the empty
+// default case) and hits the panic below.
+func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	switch emitter.state {
+	default:
+	case yaml_EMIT_STREAM_START_STATE:
+		return yaml_emitter_emit_stream_start(emitter, event)
+
+	case yaml_EMIT_FIRST_DOCUMENT_START_STATE:
+		return yaml_emitter_emit_document_start(emitter, event, true)
+
+	case yaml_EMIT_DOCUMENT_START_STATE:
+		return yaml_emitter_emit_document_start(emitter, event, false)
+
+	case yaml_EMIT_DOCUMENT_CONTENT_STATE:
+		return yaml_emitter_emit_document_content(emitter, event)
+
+	case yaml_EMIT_DOCUMENT_END_STATE:
+		return yaml_emitter_emit_document_end(emitter, event)
+
+	case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE:
+		return yaml_emitter_emit_flow_sequence_item(emitter, event, true)
+
+	case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE:
+		return yaml_emitter_emit_flow_sequence_item(emitter, event, false)
+
+	case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE:
+		return yaml_emitter_emit_flow_mapping_key(emitter, event, true)
+
+	case yaml_EMIT_FLOW_MAPPING_KEY_STATE:
+		return yaml_emitter_emit_flow_mapping_key(emitter, event, false)
+
+	case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE:
+		return yaml_emitter_emit_flow_mapping_value(emitter, event, true)
+
+	case yaml_EMIT_FLOW_MAPPING_VALUE_STATE:
+		return yaml_emitter_emit_flow_mapping_value(emitter, event, false)
+
+	case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE:
+		return yaml_emitter_emit_block_sequence_item(emitter, event, true)
+
+	case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE:
+		return yaml_emitter_emit_block_sequence_item(emitter, event, false)
+
+	case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE:
+		return yaml_emitter_emit_block_mapping_key(emitter, event, true)
+
+	case yaml_EMIT_BLOCK_MAPPING_KEY_STATE:
+		return yaml_emitter_emit_block_mapping_key(emitter, event, false)
+
+	case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE:
+		return yaml_emitter_emit_block_mapping_value(emitter, event, true)
+
+	case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE:
+		return yaml_emitter_emit_block_mapping_value(emitter, event, false)
+
+	case yaml_EMIT_END_STATE:
+		return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END")
+	}
+	panic("invalid emitter state")
+}
+
+// Expect STREAM-START. Normalizes the emitter configuration (encoding,
+// indent, width, line break), resets positional state, writes a BOM for
+// non-UTF-8 encodings, and moves to the first-document state.
+func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	if event.typ != yaml_STREAM_START_EVENT {
+		return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START")
+	}
+	if emitter.encoding == yaml_ANY_ENCODING {
+		emitter.encoding = event.encoding
+		if emitter.encoding == yaml_ANY_ENCODING {
+			emitter.encoding = yaml_UTF8_ENCODING
+		}
+	}
+	// Clamp indent to the legal 2..9 range.
+	if emitter.best_indent < 2 || emitter.best_indent > 9 {
+		emitter.best_indent = 2
+	}
+	if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 {
+		emitter.best_width = 80
+	}
+	if emitter.best_width < 0 {
+		// Negative width means unlimited.
+		emitter.best_width = 1<<31 - 1
+	}
+	if emitter.line_break == yaml_ANY_BREAK {
+		emitter.line_break = yaml_LN_BREAK
+	}
+
+	emitter.indent = -1
+	emitter.line = 0
+	emitter.column = 0
+	emitter.whitespace = true
+	emitter.indention = true
+
+	if emitter.encoding != yaml_UTF8_ENCODING {
+		if !yaml_emitter_write_bom(emitter) {
+			return false
+		}
+	}
+	emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE
+	return true
+}
+
+// Expect DOCUMENT-START or STREAM-END. For a document start this validates
+// and writes any %YAML/%TAG directives, then emits the "---" marker unless
+// the document start may remain implicit; for stream end it closes any
+// open-ended document and flushes the output.
+func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+
+	if event.typ == yaml_DOCUMENT_START_EVENT {
+
+		if event.version_directive != nil {
+			if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) {
+				return false
+			}
+		}
+
+		for i := 0; i < len(event.tag_directives); i++ {
+			tag_directive := &event.tag_directives[i]
+			if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) {
+				return false
+			}
+			if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) {
+				return false
+			}
+		}
+
+		for i := 0; i < len(default_tag_directives); i++ {
+			tag_directive := &default_tag_directives[i]
+			if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) {
+				return false
+			}
+		}
+
+		// Only the first, non-canonical document may omit "---".
+		implicit := event.implicit
+		if !first || emitter.canonical {
+			implicit = false
+		}
+
+		if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) {
+			// Close the previous open-ended document before writing
+			// directives that would otherwise attach to it.
+			if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
+				return false
+			}
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+		}
+
+		if event.version_directive != nil {
+			implicit = false
+			if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) {
+				return false
+			}
+			if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) {
+				return false
+			}
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+		}
+
+		if len(event.tag_directives) > 0 {
+			implicit = false
+			for i := 0; i < len(event.tag_directives); i++ {
+				tag_directive := &event.tag_directives[i]
+				if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) {
+					return false
+				}
+				if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) {
+					return false
+				}
+				if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) {
+					return false
+				}
+				if !yaml_emitter_write_indent(emitter) {
+					return false
+				}
+			}
+		}
+
+		if yaml_emitter_check_empty_document(emitter) {
+			implicit = false
+		}
+		if !implicit {
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+			if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) {
+				return false
+			}
+			if emitter.canonical {
+				if !yaml_emitter_write_indent(emitter) {
+					return false
+				}
+			}
+		}
+
+		emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE
+		return true
+	}
+
+	if event.typ == yaml_STREAM_END_EVENT {
+		if emitter.open_ended {
+			if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
+				return false
+			}
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+		}
+		if !yaml_emitter_flush(emitter) {
+			return false
+		}
+		emitter.state = yaml_EMIT_END_STATE
+		return true
+	}
+
+	return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END")
+}
+
+// Expect the root node; afterwards the emitter resumes at DOCUMENT-END.
+func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE)
+	return yaml_emitter_emit_node(emitter, event, true, false, false, false)
+}
+
+// Expect DOCUMENT-END. Writes "..." for explicit document ends, flushes
+// the output, clears the per-document tag directives, and returns to the
+// document-start state.
+func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	if event.typ != yaml_DOCUMENT_END_EVENT {
+		return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END")
+	}
+	if !yaml_emitter_write_indent(emitter) {
+		return false
+	}
+	if !event.implicit {
+		// [Go] Allocate the slice elsewhere.
+		if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
+			return false
+		}
+		if !yaml_emitter_write_indent(emitter) {
+			return false
+		}
+	}
+	if !yaml_emitter_flush(emitter) {
+		return false
+	}
+	emitter.state = yaml_EMIT_DOCUMENT_START_STATE
+	emitter.tag_directives = emitter.tag_directives[:0]
+	return true
+}
+
+// Expect a flow item node. On the first item "[" is written and the flow
+// level raised; SEQUENCE-END pops the indent/state stacks and writes "]";
+// otherwise a separating "," is written and the item emitted.
+func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+	if first {
+		if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) {
+			return false
+		}
+		if !yaml_emitter_increase_indent(emitter, true, false) {
+			return false
+		}
+		emitter.flow_level++
+	}
+
+	if event.typ == yaml_SEQUENCE_END_EVENT {
+		emitter.flow_level--
+		emitter.indent = emitter.indents[len(emitter.indents)-1]
+		emitter.indents = emitter.indents[:len(emitter.indents)-1]
+		if emitter.canonical && !first {
+			// Canonical form keeps a trailing comma before "]".
+			if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
+				return false
+			}
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+		}
+		if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) {
+			return false
+		}
+		emitter.state = emitter.states[len(emitter.states)-1]
+		emitter.states = emitter.states[:len(emitter.states)-1]
+
+		return true
+	}
+
+	if !first {
+		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
+			return false
+		}
+	}
+
+	// Wrap the line in canonical mode or when past the preferred width.
+	if emitter.canonical || emitter.column > emitter.best_width {
+		if !yaml_emitter_write_indent(emitter) {
+			return false
+		}
+	}
+	emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
+	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
+}
+
+// Expect a flow key node. On the first key "{" is written; MAPPING-END pops
+// the stacks and writes "}"; otherwise the key is emitted either as a
+// simple key or, when that is not possible, with an explicit "?" marker.
+func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+	if first {
+		if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) {
+			return false
+		}
+		if !yaml_emitter_increase_indent(emitter, true, false) {
+			return false
+		}
+		emitter.flow_level++
+	}
+
+	if event.typ == yaml_MAPPING_END_EVENT {
+		emitter.flow_level--
+		emitter.indent = emitter.indents[len(emitter.indents)-1]
+		emitter.indents = emitter.indents[:len(emitter.indents)-1]
+		if emitter.canonical && !first {
+			if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
+				return false
+			}
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+		}
+		if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) {
+			return false
+		}
+		emitter.state = emitter.states[len(emitter.states)-1]
+		emitter.states = emitter.states[:len(emitter.states)-1]
+		return true
+	}
+
+	if !first {
+		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
+			return false
+		}
+	}
+	if emitter.canonical || emitter.column > emitter.best_width {
+		if !yaml_emitter_write_indent(emitter) {
+			return false
+		}
+	}
+
+	if !emitter.canonical && yaml_emitter_check_simple_key(emitter) {
+		emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE)
+		return yaml_emitter_emit_node(emitter, event, false, false, true, true)
+	}
+	if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) {
+		return false
+	}
+	emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE)
+	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
+}
+
+// Expect a flow value node: writes ":" (tight for simple keys, spaced for
+// explicit ones) and emits the value, returning to the key state afterwards.
+func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool {
+	if simple {
+		if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) {
+			return false
+		}
+	} else {
+		if emitter.canonical || emitter.column > emitter.best_width {
+			if !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+		}
+		if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) {
+			return false
+		}
+	}
+	emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
+	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
+}
+
+// Expect a block item node: writes the "- " marker per item. The first item
+// may keep the parent's indent (indentless) when nested directly inside a
+// mapping value; SEQUENCE-END pops the indent/state stacks.
+func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+	if first {
+		if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) {
+			return false
+		}
+	}
+	if event.typ == yaml_SEQUENCE_END_EVENT {
+		emitter.indent = emitter.indents[len(emitter.indents)-1]
+		emitter.indents = emitter.indents[:len(emitter.indents)-1]
+		emitter.state = emitter.states[len(emitter.states)-1]
+		emitter.states = emitter.states[:len(emitter.states)-1]
+		return true
+	}
+	if !yaml_emitter_write_indent(emitter) {
+		return false
+	}
+	if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) {
+		return false
+	}
+	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE)
+	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
+}
+
+// Expect a block key node: emits either a simple key ("key:") or an
+// explicit one ("? key"). MAPPING-END pops the indent/state stacks.
+func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+	if first {
+		if !yaml_emitter_increase_indent(emitter, false, false) {
+			return false
+		}
+	}
+	if event.typ == yaml_MAPPING_END_EVENT {
+		emitter.indent = emitter.indents[len(emitter.indents)-1]
+		emitter.indents = emitter.indents[:len(emitter.indents)-1]
+		emitter.state = emitter.states[len(emitter.states)-1]
+		emitter.states = emitter.states[:len(emitter.states)-1]
+		return true
+	}
+	if !yaml_emitter_write_indent(emitter) {
+		return false
+	}
+	if yaml_emitter_check_simple_key(emitter) {
+		emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE)
+		return yaml_emitter_emit_node(emitter, event, false, false, true, true)
+	}
+	if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) {
+		return false
+	}
+	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE)
+	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
+}
+
+// Expect a block value node: ":" follows a simple key directly, or starts
+// an indented line after an explicit "?" key.
+func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool {
+	if simple {
+		if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) {
+			return false
+		}
+	} else {
+		if !yaml_emitter_write_indent(emitter) {
+			return false
+		}
+		if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) {
+			return false
+		}
+	}
+	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE)
+	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
+}
+
+// Expect a node. Records the current emission context flags, then
+// dispatches on the event type to the matching node emitter.
+func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t,
+	root bool, sequence bool, mapping bool, simple_key bool) bool {
+
+	emitter.root_context = root
+	emitter.sequence_context = sequence
+	emitter.mapping_context = mapping
+	emitter.simple_key_context = simple_key
+
+	// Every case returns, so no trailing return is needed (the previous
+	// "return false" after the switch was unreachable dead code).
+	switch event.typ {
+	case yaml_ALIAS_EVENT:
+		return yaml_emitter_emit_alias(emitter, event)
+	case yaml_SCALAR_EVENT:
+		return yaml_emitter_emit_scalar(emitter, event)
+	case yaml_SEQUENCE_START_EVENT:
+		return yaml_emitter_emit_sequence_start(emitter, event)
+	case yaml_MAPPING_START_EVENT:
+		return yaml_emitter_emit_mapping_start(emitter, event)
+	default:
+		return yaml_emitter_set_emitter_error(emitter,
+			"expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS")
+	}
+}
+
+// Expect ALIAS: writes the anchor reference and pops the state stack.
+func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	if !yaml_emitter_process_anchor(emitter) {
+		return false
+	}
+	emitter.state = emitter.states[len(emitter.states)-1]
+	emitter.states = emitter.states[:len(emitter.states)-1]
+	return true
+}
+
+// Expect SCALAR: selects a scalar style, writes anchor/tag/value, then
+// restores the indent and pops the state stack.
+func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	if !yaml_emitter_select_scalar_style(emitter, event) {
+		return false
+	}
+	if !yaml_emitter_process_anchor(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_tag(emitter) {
+		return false
+	}
+	if !yaml_emitter_increase_indent(emitter, true, false) {
+		return false
+	}
+	if !yaml_emitter_process_scalar(emitter) {
+		return false
+	}
+	emitter.indent = emitter.indents[len(emitter.indents)-1]
+	emitter.indents = emitter.indents[:len(emitter.indents)-1]
+	emitter.state = emitter.states[len(emitter.states)-1]
+	emitter.states = emitter.states[:len(emitter.states)-1]
+	return true
+}
+
+// Expect SEQUENCE-START: writes anchor/tag and chooses flow vs block style.
+// Flow is forced inside flow context, in canonical mode, when requested by
+// the event, or when the sequence is empty.
+func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	if !yaml_emitter_process_anchor(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_tag(emitter) {
+		return false
+	}
+	if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE ||
+		yaml_emitter_check_empty_sequence(emitter) {
+		emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE
+	} else {
+		emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE
+	}
+	return true
+}
+
+// Expect MAPPING-START: writes anchor/tag and chooses flow vs block style
+// by the same rules as sequences (flow context, canonical, requested style,
+// or an empty mapping).
+func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	if !yaml_emitter_process_anchor(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_tag(emitter) {
+		return false
+	}
+	if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE ||
+		yaml_emitter_check_empty_mapping(emitter) {
+		emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE
+	} else {
+		emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE
+	}
+	return true
+}
+
+// Check if the document content is an empty scalar. The port deliberately
+// always answers false; see the inline [Go] note from the original author.
+func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool {
+	return false // [Go] Huh?
+}
+
+// Check if the next two queued events form an empty sequence
+// (SEQUENCE-START immediately followed by SEQUENCE-END).
+func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool {
+	if len(emitter.events)-emitter.events_head < 2 {
+		return false
+	}
+	return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT &&
+		emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT
+}
+
+// Check if the next two queued events form an empty mapping
+// (MAPPING-START immediately followed by MAPPING-END).
+func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool {
+	if len(emitter.events)-emitter.events_head < 2 {
+		return false
+	}
+	return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT &&
+		emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT
+}
+
+// Check if the next node can be expressed as a simple key: single-line,
+// and its total rendered length (anchor + tag + value) at most 128 bytes.
+// Sequences and mappings qualify only when empty.
+func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool {
+	length := 0
+	switch emitter.events[emitter.events_head].typ {
+	case yaml_ALIAS_EVENT:
+		length += len(emitter.anchor_data.anchor)
+	case yaml_SCALAR_EVENT:
+		if emitter.scalar_data.multiline {
+			return false
+		}
+		length += len(emitter.anchor_data.anchor) +
+			len(emitter.tag_data.handle) +
+			len(emitter.tag_data.suffix) +
+			len(emitter.scalar_data.value)
+	case yaml_SEQUENCE_START_EVENT:
+		if !yaml_emitter_check_empty_sequence(emitter) {
+			return false
+		}
+		length += len(emitter.anchor_data.anchor) +
+			len(emitter.tag_data.handle) +
+			len(emitter.tag_data.suffix)
+	case yaml_MAPPING_START_EVENT:
+		if !yaml_emitter_check_empty_mapping(emitter) {
+			return false
+		}
+		length += len(emitter.anchor_data.anchor) +
+			len(emitter.tag_data.handle) +
+			len(emitter.tag_data.suffix)
+	default:
+		return false
+	}
+	return length <= 128
+}
+
+// Determine an acceptable scalar style, downgrading the requested style
+// step by step (plain -> single-quoted -> double-quoted) whenever the
+// analyzed scalar data or the current context rules a style out.
+func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+
+	no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0
+	if no_tag && !event.implicit && !event.quoted_implicit {
+		return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified")
+	}
+
+	style := event.scalar_style()
+	if style == yaml_ANY_SCALAR_STYLE {
+		style = yaml_PLAIN_SCALAR_STYLE
+	}
+	if emitter.canonical {
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+	if emitter.simple_key_context && emitter.scalar_data.multiline {
+		// Simple keys must stay on one line.
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+
+	if style == yaml_PLAIN_SCALAR_STYLE {
+		if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed ||
+			emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+		if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+		if no_tag && !event.implicit {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+	}
+	if style == yaml_SINGLE_QUOTED_SCALAR_STYLE {
+		if !emitter.scalar_data.single_quoted_allowed {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		}
+	}
+	if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE {
+		if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		}
+	}
+
+	if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE {
+		// A non-plain untagged scalar needs the "!" non-specific tag so
+		// it round-trips with the same resolution.
+		emitter.tag_data.handle = []byte{'!'}
+	}
+	emitter.scalar_data.style = style
+	return true
+}
+
+// Write an anchor ("&name") or, for aliases, a reference ("*name").
+func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool {
+	if emitter.anchor_data.anchor == nil {
+		return true
+	}
+	c := []byte{'&'}
+	if emitter.anchor_data.alias {
+		c[0] = '*'
+	}
+	if !yaml_emitter_write_indicator(emitter, c, true, false, false) {
+		return false
+	}
+	return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor)
+}
+
+// Write a tag: shorthand form "handle+suffix" when a handle is present,
+// otherwise the verbatim form "!<suffix>". Does nothing when no tag data
+// was analyzed for the current event.
+func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool {
+	if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 {
+		return true
+	}
+	if len(emitter.tag_data.handle) > 0 {
+		if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) {
+			return false
+		}
+		if len(emitter.tag_data.suffix) > 0 {
+			if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) {
+				return false
+			}
+		}
+	} else {
+		// [Go] Allocate these slices elsewhere.
+		if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) {
+			return false
+		}
+		if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) {
+			return false
+		}
+		if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) {
+			return false
+		}
+	}
+	return true
+}
+
+// Write a scalar.
+func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool {
+ switch emitter.scalar_data.style {
+ case yaml_PLAIN_SCALAR_STYLE:
+ return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)
+
+ case yaml_SINGLE_QUOTED_SCALAR_STYLE:
+ return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)
+
+ case yaml_DOUBLE_QUOTED_SCALAR_STYLE:
+ return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)
+
+ case yaml_LITERAL_SCALAR_STYLE:
+ return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value)
+
+ case yaml_FOLDED_SCALAR_STYLE:
+ return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value)
+ }
+ panic("unknown scalar style")
+}
+
+// Check if a %YAML directive is valid.
+func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool {
+ if version_directive.major != 1 || version_directive.minor != 1 {
+ return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive")
+ }
+ return true
+}
+
+// Check if a %TAG directive is valid.
+func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool {
+ handle := tag_directive.handle
+ prefix := tag_directive.prefix
+ if len(handle) == 0 {
+ return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty")
+ }
+ if handle[0] != '!' {
+ return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'")
+ }
+ if handle[len(handle)-1] != '!' {
+ return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'")
+ }
+ for i := 1; i < len(handle)-1; i += width(handle[i]) {
+ if !is_alpha(handle, i) {
+ return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only")
+ }
+ }
+ if len(prefix) == 0 {
+ return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty")
+ }
+ return true
+}
+
+// Check if an anchor is valid.
+func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool {
+ if len(anchor) == 0 {
+ problem := "anchor value must not be empty"
+ if alias {
+ problem = "alias value must not be empty"
+ }
+ return yaml_emitter_set_emitter_error(emitter, problem)
+ }
+ for i := 0; i < len(anchor); i += width(anchor[i]) {
+ if !is_alpha(anchor, i) {
+ problem := "anchor value must contain alphanumerical characters only"
+ if alias {
+ problem = "alias value must contain alphanumerical characters only"
+ }
+ return yaml_emitter_set_emitter_error(emitter, problem)
+ }
+ }
+ emitter.anchor_data.anchor = anchor
+ emitter.anchor_data.alias = alias
+ return true
+}
+
+// Check if a tag is valid.
+func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool {
+ if len(tag) == 0 {
+ return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty")
+ }
+ for i := 0; i < len(emitter.tag_directives); i++ {
+ tag_directive := &emitter.tag_directives[i]
+ if bytes.HasPrefix(tag, tag_directive.prefix) {
+ emitter.tag_data.handle = tag_directive.handle
+ emitter.tag_data.suffix = tag[len(tag_directive.prefix):]
+ return true
+ }
+ }
+ emitter.tag_data.suffix = tag
+ return true
+}
+
+// Check if a scalar is valid.
+func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
+ var (
+ block_indicators = false
+ flow_indicators = false
+ line_breaks = false
+ special_characters = false
+
+ leading_space = false
+ leading_break = false
+ trailing_space = false
+ trailing_break = false
+ break_space = false
+ space_break = false
+
+ preceeded_by_whitespace = false
+ followed_by_whitespace = false
+ previous_space = false
+ previous_break = false
+ )
+
+ emitter.scalar_data.value = value
+
+ if len(value) == 0 {
+ emitter.scalar_data.multiline = false
+ emitter.scalar_data.flow_plain_allowed = false
+ emitter.scalar_data.block_plain_allowed = true
+ emitter.scalar_data.single_quoted_allowed = true
+ emitter.scalar_data.block_allowed = false
+ return true
+ }
+
+ if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) {
+ block_indicators = true
+ flow_indicators = true
+ }
+
+ preceeded_by_whitespace = true
+ for i, w := 0, 0; i < len(value); i += w {
+ w = width(value[0])
+ followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w)
+
+ if i == 0 {
+ switch value[i] {
+ case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`':
+ flow_indicators = true
+ block_indicators = true
+ case '?', ':':
+ flow_indicators = true
+ if followed_by_whitespace {
+ block_indicators = true
+ }
+ case '-':
+ if followed_by_whitespace {
+ flow_indicators = true
+ block_indicators = true
+ }
+ }
+ } else {
+ switch value[i] {
+ case ',', '?', '[', ']', '{', '}':
+ flow_indicators = true
+ case ':':
+ flow_indicators = true
+ if followed_by_whitespace {
+ block_indicators = true
+ }
+ case '#':
+ if preceeded_by_whitespace {
+ flow_indicators = true
+ block_indicators = true
+ }
+ }
+ }
+
+ if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode {
+ special_characters = true
+ }
+ if is_space(value, i) {
+ if i == 0 {
+ leading_space = true
+ }
+ if i+width(value[i]) == len(value) {
+ trailing_space = true
+ }
+ if previous_break {
+ break_space = true
+ }
+ previous_space = true
+ previous_break = false
+ } else if is_break(value, i) {
+ line_breaks = true
+ if i == 0 {
+ leading_break = true
+ }
+ if i+width(value[i]) == len(value) {
+ trailing_break = true
+ }
+ if previous_space {
+ space_break = true
+ }
+ previous_space = false
+ previous_break = true
+ } else {
+ previous_space = false
+ previous_break = false
+ }
+
+ // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition.
+ preceeded_by_whitespace = is_blankz(value, i)
+ }
+
+ emitter.scalar_data.multiline = line_breaks
+ emitter.scalar_data.flow_plain_allowed = true
+ emitter.scalar_data.block_plain_allowed = true
+ emitter.scalar_data.single_quoted_allowed = true
+ emitter.scalar_data.block_allowed = true
+
+ if leading_space || leading_break || trailing_space || trailing_break {
+ emitter.scalar_data.flow_plain_allowed = false
+ emitter.scalar_data.block_plain_allowed = false
+ }
+ if trailing_space {
+ emitter.scalar_data.block_allowed = false
+ }
+ if break_space {
+ emitter.scalar_data.flow_plain_allowed = false
+ emitter.scalar_data.block_plain_allowed = false
+ emitter.scalar_data.single_quoted_allowed = false
+ }
+ if space_break || special_characters {
+ emitter.scalar_data.flow_plain_allowed = false
+ emitter.scalar_data.block_plain_allowed = false
+ emitter.scalar_data.single_quoted_allowed = false
+ emitter.scalar_data.block_allowed = false
+ }
+ if line_breaks {
+ emitter.scalar_data.flow_plain_allowed = false
+ emitter.scalar_data.block_plain_allowed = false
+ }
+ if flow_indicators {
+ emitter.scalar_data.flow_plain_allowed = false
+ }
+ if block_indicators {
+ emitter.scalar_data.block_plain_allowed = false
+ }
+ return true
+}
+
// Check if the event data is valid, running the anchor/tag/scalar
// analyzers as appropriate for the event type and filling in
// emitter.anchor_data, emitter.tag_data and emitter.scalar_data.
func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool {

	// Reset the results of any previous analysis.
	emitter.anchor_data.anchor = nil
	emitter.tag_data.handle = nil
	emitter.tag_data.suffix = nil
	emitter.scalar_data.value = nil

	switch event.typ {
	case yaml_ALIAS_EVENT:
		if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) {
			return false
		}

	case yaml_SCALAR_EVENT:
		if len(event.anchor) > 0 {
			if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
				return false
			}
		}
		// The tag is only analyzed (and later written) when it cannot be
		// left implicit in either plain or quoted form, or when canonical
		// output forces it.
		if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) {
			if !yaml_emitter_analyze_tag(emitter, event.tag) {
				return false
			}
		}
		if !yaml_emitter_analyze_scalar(emitter, event.value) {
			return false
		}

	case yaml_SEQUENCE_START_EVENT:
		if len(event.anchor) > 0 {
			if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
				return false
			}
		}
		if len(event.tag) > 0 && (emitter.canonical || !event.implicit) {
			if !yaml_emitter_analyze_tag(emitter, event.tag) {
				return false
			}
		}

	case yaml_MAPPING_START_EVENT:
		if len(event.anchor) > 0 {
			if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
				return false
			}
		}
		if len(event.tag) > 0 && (emitter.canonical || !event.implicit) {
			if !yaml_emitter_analyze_tag(emitter, event.tag) {
				return false
			}
		}
	}
	return true
}
+
// Write the BOM character (the UTF-8 encoded byte order mark) directly
// into the output buffer.
func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool {
	// NOTE(review): this relies on flush draining the buffer so the three
	// bytes appended below always fit — confirm against flush's contract.
	if !flush(emitter) {
		return false
	}
	pos := emitter.buffer_pos
	emitter.buffer[pos+0] = '\xEF'
	emitter.buffer[pos+1] = '\xBB'
	emitter.buffer[pos+2] = '\xBF'
	emitter.buffer_pos += 3
	return true
}
+
+func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool {
+ indent := emitter.indent
+ if indent < 0 {
+ indent = 0
+ }
+ if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) {
+ if !put_break(emitter) {
+ return false
+ }
+ }
+ for emitter.column < indent {
+ if !put(emitter, ' ') {
+ return false
+ }
+ }
+ emitter.whitespace = true
+ emitter.indention = true
+ return true
+}
+
+func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool {
+ if need_whitespace && !emitter.whitespace {
+ if !put(emitter, ' ') {
+ return false
+ }
+ }
+ if !write_all(emitter, indicator) {
+ return false
+ }
+ emitter.whitespace = is_whitespace
+ emitter.indention = (emitter.indention && is_indention)
+ emitter.open_ended = false
+ return true
+}
+
+func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool {
+ if !write_all(emitter, value) {
+ return false
+ }
+ emitter.whitespace = false
+ emitter.indention = false
+ return true
+}
+
+func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool {
+ if !emitter.whitespace {
+ if !put(emitter, ' ') {
+ return false
+ }
+ }
+ if !write_all(emitter, value) {
+ return false
+ }
+ emitter.whitespace = false
+ emitter.indention = false
+ return true
+}
+
// Write a tag suffix or verbatim tag body, percent-encoding (as %XX with
// uppercase hex digits) every byte that is neither alphanumerical nor in
// the set of URI characters allowed to appear raw in a tag.
func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool {
	if need_whitespace && !emitter.whitespace {
		if !put(emitter, ' ') {
			return false
		}
	}
	for i := 0; i < len(value); {
		var must_write bool
		switch value[i] {
		// URI characters that may be written without escaping.
		case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']':
			must_write = true
		default:
			must_write = is_alpha(value, i)
		}
		if must_write {
			if !write(emitter, value, &i) {
				return false
			}
		} else {
			// Percent-encode each byte of the (possibly multi-byte)
			// character; i is advanced manually inside the inner loop.
			w := width(value[i])
			for k := 0; k < w; k++ {
				octet := value[i]
				i++
				if !put(emitter, '%') {
					return false
				}

				// High nibble as an uppercase hex digit.
				c := octet >> 4
				if c < 10 {
					c += '0'
				} else {
					c += 'A' - 10
				}
				if !put(emitter, c) {
					return false
				}

				// Low nibble as an uppercase hex digit.
				c = octet & 0x0f
				if c < 10 {
					c += '0'
				} else {
					c += 'A' - 10
				}
				if !put(emitter, c) {
					return false
				}
			}
		}
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
+
// Write a plain (unquoted) scalar, folding long lines at single spaces
// when allow_breaks permits.
func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
	// Separate from the previous token with a space if needed.
	if !emitter.whitespace {
		if !put(emitter, ' ') {
			return false
		}
	}

	spaces := false // Currently inside a run of spaces.
	breaks := false // Currently inside a run of line breaks.
	for i := 0; i < len(value); {
		if is_space(value, i) {
			// Past the preferred width, fold the line at a lone space
			// instead of writing the space itself.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			spaces = true
		} else if is_break(value, i) {
			// A '\n' that starts a break run is doubled: a single break
			// would be folded back into a space by the parser.
			if !breaks && value[i] == '\n' {
				if !put_break(emitter) {
					return false
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			// First non-break after a break run: re-indent the new line.
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			spaces = false
			breaks = false
		}
	}

	emitter.whitespace = false
	emitter.indention = false
	if emitter.root_context {
		// A plain scalar at the document root leaves the stream
		// open-ended: a following document needs an explicit marker.
		emitter.open_ended = true
	}

	return true
}
+
// Write a single-quoted scalar: surround the value with ', double any
// embedded single quotes, and fold long lines at interior spaces when
// allow_breaks permits.
func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {

	if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) {
		return false
	}

	spaces := false // Currently inside a run of spaces.
	breaks := false // Currently inside a run of line breaks.
	for i := 0; i < len(value); {
		if is_space(value, i) {
			// Fold at a lone interior space (never the first or last
			// character) once past the preferred width.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			spaces = true
		} else if is_break(value, i) {
			// Double a '\n' that starts a break run so the parser does
			// not fold it into a space.
			if !breaks && value[i] == '\n' {
				if !put_break(emitter) {
					return false
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			// A single quote is escaped by writing it twice.
			if value[i] == '\'' {
				if !put(emitter, '\'') {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			spaces = false
			breaks = false
		}
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
+
// Write a double-quoted scalar, escaping unprintable characters, BOMs,
// breaks, quotes and backslashes, and (when the output is not unicode)
// all non-ASCII characters. Long lines may be folded at interior spaces
// when allow_breaks permits.
func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
	spaces := false
	if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) {
		return false
	}

	for i := 0; i < len(value); {
		if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) ||
			is_bom(value, i) || is_break(value, i) ||
			value[i] == '"' || value[i] == '\\' {

			octet := value[i]

			// Decode one UTF-8 character by hand into the rune v; w is
			// its byte width, derived from the leading byte's pattern.
			var w int
			var v rune
			switch {
			case octet&0x80 == 0x00:
				w, v = 1, rune(octet&0x7F)
			case octet&0xE0 == 0xC0:
				w, v = 2, rune(octet&0x1F)
			case octet&0xF0 == 0xE0:
				w, v = 3, rune(octet&0x0F)
			case octet&0xF8 == 0xF0:
				w, v = 4, rune(octet&0x07)
			}
			for k := 1; k < w; k++ {
				octet = value[i+k]
				v = (v << 6) + (rune(octet) & 0x3F)
			}
			i += w

			if !put(emitter, '\\') {
				return false
			}

			// Use the named YAML escape when one exists; otherwise fall
			// back to \xXX, \uXXXX or \UXXXXXXXX depending on magnitude.
			var ok bool
			switch v {
			case 0x00:
				ok = put(emitter, '0')
			case 0x07:
				ok = put(emitter, 'a')
			case 0x08:
				ok = put(emitter, 'b')
			case 0x09:
				ok = put(emitter, 't')
			case 0x0A:
				ok = put(emitter, 'n')
			case 0x0b:
				ok = put(emitter, 'v')
			case 0x0c:
				ok = put(emitter, 'f')
			case 0x0d:
				ok = put(emitter, 'r')
			case 0x1b:
				ok = put(emitter, 'e')
			case 0x22:
				ok = put(emitter, '"')
			case 0x5c:
				ok = put(emitter, '\\')
			case 0x85:
				ok = put(emitter, 'N')
			case 0xA0:
				ok = put(emitter, '_')
			case 0x2028:
				ok = put(emitter, 'L')
			case 0x2029:
				ok = put(emitter, 'P')
			default:
				if v <= 0xFF {
					ok = put(emitter, 'x')
					w = 2
				} else if v <= 0xFFFF {
					ok = put(emitter, 'u')
					w = 4
				} else {
					ok = put(emitter, 'U')
					w = 8
				}
				// w is reused here as the number of hex digits to emit,
				// most significant nibble first.
				for k := (w - 1) * 4; ok && k >= 0; k -= 4 {
					digit := byte((v >> uint(k)) & 0x0F)
					if digit < 10 {
						ok = put(emitter, digit+'0')
					} else {
						ok = put(emitter, digit+'A'-10)
					}
				}
			}
			if !ok {
				return false
			}
			spaces = false
		} else if is_space(value, i) {
			// Fold at an interior space once past the preferred width; a
			// following space must be protected with a trailing backslash
			// so the parser does not swallow it.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				if is_space(value, i+1) {
					if !put(emitter, '\\') {
						return false
					}
				}
				i += width(value[i])
			} else if !write(emitter, value, &i) {
				return false
			}
			spaces = true
		} else {
			if !write(emitter, value, &i) {
				return false
			}
			spaces = false
		}
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
+
// Write the indentation and chomping indicators that follow a block
// scalar's '|' or '>' introducer.
func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool {
	// An explicit indent hint is required when the content starts with a
	// space or break, since the parser could not auto-detect the indent.
	// NOTE(review): this indexes value[0] unconditionally — presumably
	// block style is never selected for an empty scalar (see
	// yaml_emitter_analyze_scalar), but confirm before relying on it.
	if is_space(value, 0) || is_break(value, 0) {
		indent_hint := []byte{'0' + byte(emitter.best_indent)}
		if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) {
			return false
		}
	}

	emitter.open_ended = false

	// Chomping hint: '-' (strip) when there is no trailing break;
	// '+' (keep) when the scalar is a lone break or ends in two or more
	// breaks; no hint when it ends in exactly one break.
	var chomp_hint [1]byte
	if len(value) == 0 {
		chomp_hint[0] = '-'
	} else {
		i := len(value) - 1
		// Step back over UTF-8 continuation bytes to a character start.
		for value[i]&0xC0 == 0x80 {
			i--
		}
		if !is_break(value, i) {
			chomp_hint[0] = '-'
		} else if i == 0 {
			chomp_hint[0] = '+'
			emitter.open_ended = true
		} else {
			i--
			for value[i]&0xC0 == 0x80 {
				i--
			}
			if is_break(value, i) {
				chomp_hint[0] = '+'
				emitter.open_ended = true
			}
		}
	}
	if chomp_hint[0] != 0 {
		if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) {
			return false
		}
	}
	return true
}
+
// Write a literal block scalar ('|'): the header with its hints, then
// each line of the value indented, preserving all line breaks verbatim.
func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool {
	if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) {
		return false
	}
	if !yaml_emitter_write_block_scalar_hints(emitter, value) {
		return false
	}
	// The content always starts on the line after the header.
	if !put_break(emitter) {
		return false
	}
	emitter.indention = true
	emitter.whitespace = true
	breaks := true // Start as if following a break so the first line is indented.
	for i := 0; i < len(value); {
		if is_break(value, i) {
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			// Re-indent at the start of each content line.
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			breaks = false
		}
	}

	return true
}
+
// Write a folded block scalar ('>'): the header with its hints, then the
// value with long lines foldable at spaces; an extra break is inserted
// before a '\n' that would otherwise be folded into a space on reading.
func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool {
	if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) {
		return false
	}
	if !yaml_emitter_write_block_scalar_hints(emitter, value) {
		return false
	}

	// The content always starts on the line after the header.
	if !put_break(emitter) {
		return false
	}
	emitter.indention = true
	emitter.whitespace = true

	breaks := true         // Currently inside a run of breaks (starts true to indent line one).
	leading_spaces := true // Current line started with blank characters.
	for i := 0; i < len(value); {
		if is_break(value, i) {
			if !breaks && !leading_spaces && value[i] == '\n' {
				// Look past the break run; if actual content follows,
				// double the break so it survives folding.
				// NOTE(review): k scans from the start of value rather
				// than from i — libyaml's equivalent scans from the
				// current position; verify this is intentional in the
				// Go port.
				k := 0
				for is_break(value, k) {
					k += width(value[k])
				}
				if !is_blankz(value, k) {
					if !put_break(emitter) {
						return false
					}
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				leading_spaces = is_blank(value, i)
			}
			// Fold at a lone space once past the preferred width.
			if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			emitter.indention = false
			breaks = false
		}
	}
	return true
}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/encode.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/encode.go
new file mode 100644
index 00000000..0b9048d7
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/encode.go
@@ -0,0 +1,265 @@
+package yaml
+
+import (
+ "reflect"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
// encoder drives the libyaml-style emitter to translate reflected Go
// values into a YAML byte stream accumulated in out.
type encoder struct {
	emitter yaml_emitter_t // Low-level emitter state machine.
	event   yaml_event_t   // Scratch event handed to the emitter.
	out     []byte         // Accumulated YAML output.
	flow    bool           // One-shot flag: emit the next collection in flow style.
}
+
// newEncoder creates an encoder whose emitter writes into e.out, and
// emits the STREAM-START and DOCUMENT-START events so callers can
// immediately marshal the document's root node.
func newEncoder() (e *encoder) {
	e = &encoder{}
	e.must(yaml_emitter_initialize(&e.emitter))
	yaml_emitter_set_output_string(&e.emitter, &e.out)
	e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
	e.emit()
	e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
	e.emit()
	return e
}
+
// finish closes the implicit document and the stream, completing the
// YAML output in e.out.
func (e *encoder) finish() {
	e.must(yaml_document_end_event_initialize(&e.event, true))
	e.emit()
	// Clear open_ended so the stream end does not force a "..." marker.
	e.emitter.open_ended = false
	e.must(yaml_stream_end_event_initialize(&e.event))
	e.emit()
}
+
// destroy releases the resources held by the underlying emitter.
func (e *encoder) destroy() {
	yaml_emitter_delete(&e.emitter)
}
+
// emit hands the pending e.event to the emitter, panicking via must on
// failure except around the stream/document end events.
func (e *encoder) emit() {
	// This will internally delete the e.event value.
	// NOTE(review): e.event.typ is read after yaml_emitter_emit has
	// consumed the event — confirm the end-event exemptions below behave
	// as intended.
	if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
		e.must(false)
	}
}
+
+func (e *encoder) must(ok bool) {
+ if !ok {
+ msg := e.emitter.problem
+ if msg == "" {
+ msg = "Unknown problem generating YAML content"
+ }
+ fail(msg)
+ }
+}
+
// marshal encodes a single reflected value, dispatching on its kind.
// An empty tag lets the emitter choose an implicit tag. Values
// implementing Getter are first given a chance to substitute their own
// tag and value.
func (e *encoder) marshal(tag string, in reflect.Value) {
	if !in.IsValid() {
		e.nilv()
		return
	}
	var value interface{}
	if getter, ok := in.Interface().(Getter); ok {
		// The Getter hook replaces both the tag and the value.
		tag, value = getter.GetYAML()
		tag = longTag(tag)
		if value == nil {
			e.nilv()
			return
		}
		in = reflect.ValueOf(value)
	}
	switch in.Kind() {
	case reflect.Interface:
		if in.IsNil() {
			e.nilv()
		} else {
			e.marshal(tag, in.Elem())
		}
	case reflect.Map:
		e.mapv(tag, in)
	case reflect.Ptr:
		if in.IsNil() {
			e.nilv()
		} else {
			e.marshal(tag, in.Elem())
		}
	case reflect.Struct:
		e.structv(tag, in)
	case reflect.Slice:
		e.slicev(tag, in)
	case reflect.String:
		e.stringv(tag, in)
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		// time.Duration is rendered in its human-readable string form.
		if in.Type() == durationType {
			e.stringv(tag, reflect.ValueOf(in.Interface().(time.Duration).String()))
		} else {
			e.intv(tag, in)
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		e.uintv(tag, in)
	case reflect.Float32, reflect.Float64:
		e.floatv(tag, in)
	case reflect.Bool:
		e.boolv(tag, in)
	default:
		panic("Can't marshal type: " + in.Type().String())
	}
}
+
+func (e *encoder) mapv(tag string, in reflect.Value) {
+ e.mappingv(tag, func() {
+ keys := keyList(in.MapKeys())
+ sort.Sort(keys)
+ for _, k := range keys {
+ e.marshal("", k)
+ e.marshal("", in.MapIndex(k))
+ }
+ })
+}
+
// structv encodes a struct as a YAML mapping using the cached field
// information (yaml tags, inline and omitempty options).
func (e *encoder) structv(tag string, in reflect.Value) {
	sinfo, err := getStructInfo(in.Type())
	if err != nil {
		panic(err)
	}
	e.mappingv(tag, func() {
		for _, info := range sinfo.FieldsList {
			var value reflect.Value
			if info.Inline == nil {
				value = in.Field(info.Num)
			} else {
				// Inlined field: reach through the embedded struct.
				value = in.FieldByIndex(info.Inline)
			}
			// omitempty drops fields whose value is the zero value.
			if info.OmitEmpty && isZero(value) {
				continue
			}
			e.marshal("", reflect.ValueOf(info.Key))
			// flow is a one-shot flag consumed by the next collection.
			e.flow = info.Flow
			e.marshal("", value)
		}
	})
}
+
+func (e *encoder) mappingv(tag string, f func()) {
+ implicit := tag == ""
+ style := yaml_BLOCK_MAPPING_STYLE
+ if e.flow {
+ e.flow = false
+ style = yaml_FLOW_MAPPING_STYLE
+ }
+ e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
+ e.emit()
+ f()
+ e.must(yaml_mapping_end_event_initialize(&e.event))
+ e.emit()
+}
+
+func (e *encoder) slicev(tag string, in reflect.Value) {
+ implicit := tag == ""
+ style := yaml_BLOCK_SEQUENCE_STYLE
+ if e.flow {
+ e.flow = false
+ style = yaml_FLOW_SEQUENCE_STYLE
+ }
+ e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
+ e.emit()
+ n := in.Len()
+ for i := 0; i < n; i++ {
+ e.marshal("", in.Index(i))
+ }
+ e.must(yaml_sequence_end_event_initialize(&e.event))
+ e.emit()
+}
+
// isBase60Float returns whether s is in base 60 notation as defined in
// YAML 1.1.
//
// The base 60 float notation in YAML 1.1 is a terrible idea and is
// unsupported in YAML 1.2 and by this package, but these should be
// marshalled quoted for the time being for compatibility with other
// parsers.
func isBase60Float(s string) (result bool) {
	if s == "" {
		return false
	}
	// Fast path: must start with a sign or digit, and contain a ':'.
	switch c := s[0]; {
	case c == '+' || c == '-':
	case c >= '0' && c <= '9':
	default:
		return false
	}
	if strings.IndexByte(s, ':') < 0 {
		return false
	}
	// Do the full match.
	return base60float.MatchString(s)
}

// From http://yaml.org/type/float.html, except the regular expression there
// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
+
// stringv encodes a string scalar, choosing a style that guarantees the
// value reads back as a string: double-quoted when a plain rendering
// would resolve to another type (or looks like a base 60 float), a
// literal block when it contains newlines, plain otherwise. Invalid
// UTF-8 is re-tagged !!binary with base64 content via resolve.
func (e *encoder) stringv(tag string, in reflect.Value) {
	var style yaml_scalar_style_t
	s := in.String()
	rtag, rs := resolve("", s)
	if rtag == yaml_BINARY_TAG {
		if tag == "" || tag == yaml_STR_TAG {
			tag = rtag
			s = rs.(string)
		} else if tag == yaml_BINARY_TAG {
			fail("explicitly tagged !!binary data must be base64-encoded")
		} else {
			fail("cannot marshal invalid UTF-8 data as " + shortTag(tag))
		}
	}
	if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	} else if strings.Contains(s, "\n") {
		style = yaml_LITERAL_SCALAR_STYLE
	} else {
		style = yaml_PLAIN_SCALAR_STYLE
	}
	e.emitScalar(s, "", tag, style)
}
+
+func (e *encoder) boolv(tag string, in reflect.Value) {
+ var s string
+ if in.Bool() {
+ s = "true"
+ } else {
+ s = "false"
+ }
+ e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) intv(tag string, in reflect.Value) {
+ s := strconv.FormatInt(in.Int(), 10)
+ e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) uintv(tag string, in reflect.Value) {
+ s := strconv.FormatUint(in.Uint(), 10)
+ e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) floatv(tag string, in reflect.Value) {
+ // FIXME: Handle 64 bits here.
+ s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
+ switch s {
+ case "+Inf":
+ s = ".inf"
+ case "-Inf":
+ s = "-.inf"
+ case "NaN":
+ s = ".nan"
+ }
+ e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
// nilv emits a plain "null" scalar for nil or invalid values.
func (e *encoder) nilv() {
	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
}
+
// emitScalar builds a scalar event and emits it. An empty tag marks the
// scalar as implicit in both plain and quoted contexts, so no tag is
// written out.
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
	implicit := tag == ""
	e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
	e.emit()
}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/parserc.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/parserc.go
new file mode 100644
index 00000000..0a7037ad
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/parserc.go
@@ -0,0 +1,1096 @@
+package yaml
+
+import (
+ "bytes"
+)
+
+// The parser implements the following grammar:
+//
+// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+// implicit_document ::= block_node DOCUMENT-END*
+// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+// block_node_or_indentless_sequence ::=
+// ALIAS
+// | properties (block_content | indentless_block_sequence)?
+// | block_content
+// | indentless_block_sequence
+// block_node ::= ALIAS
+// | properties block_content?
+// | block_content
+// flow_node ::= ALIAS
+// | properties flow_content?
+// | flow_content
+// properties ::= TAG ANCHOR? | ANCHOR TAG?
+// block_content ::= block_collection | flow_collection | SCALAR
+// flow_content ::= flow_collection | SCALAR
+// block_collection ::= block_sequence | block_mapping
+// flow_collection ::= flow_sequence | flow_mapping
+// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+// block_mapping ::= BLOCK-MAPPING_START
+// ((KEY block_node_or_indentless_sequence?)?
+// (VALUE block_node_or_indentless_sequence?)?)*
+// BLOCK-END
+// flow_sequence ::= FLOW-SEQUENCE-START
+// (flow_sequence_entry FLOW-ENTRY)*
+// flow_sequence_entry?
+// FLOW-SEQUENCE-END
+// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+// flow_mapping ::= FLOW-MAPPING-START
+// (flow_mapping_entry FLOW-ENTRY)*
+// flow_mapping_entry?
+// FLOW-MAPPING-END
+// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+// Peek the next token in the token queue.
+func peek_token(parser *yaml_parser_t) *yaml_token_t {
+ if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
+ return &parser.tokens[parser.tokens_head]
+ }
+ return nil
+}
+
// Remove the next token from the queue (must be called after peek_token).
func skip_token(parser *yaml_parser_t) {
	parser.token_available = false
	parser.tokens_parsed++
	// Record stream termination so yaml_parser_parse can stop producing events.
	parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
	parser.tokens_head++
}
+
// Get the next event.
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
	// Erase the event object.
	*event = yaml_event_t{}

	// No events after the end of the stream or error. Note this still
	// returns true: the caller distinguishes "finished" from "failed"
	// via the parser state/error, not the return value.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
		return true
	}

	// Generate the next event.
	return yaml_parser_state_machine(parser, event)
}
+
+// Set parser error.
+func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
+ parser.error = yaml_PARSER_ERROR
+ parser.problem = problem
+ parser.problem_mark = problem_mark
+ return false
+}
+
+func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
+ parser.error = yaml_PARSER_ERROR
+ parser.context = context
+ parser.context_mark = context_mark
+ parser.problem = problem
+ parser.problem_mark = problem_mark
+ return false
+}
+
// State dispatcher: routes the current parser state to the production
// handler that emits the next event.
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
	//trace("yaml_parser_state_machine", "state:", parser.state.String())

	switch parser.state {
	case yaml_PARSE_STREAM_START_STATE:
		return yaml_parser_parse_stream_start(parser, event)

	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, true)

	case yaml_PARSE_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, false)

	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
		return yaml_parser_parse_document_content(parser, event)

	case yaml_PARSE_DOCUMENT_END_STATE:
		return yaml_parser_parse_document_end(parser, event)

	case yaml_PARSE_BLOCK_NODE_STATE:
		return yaml_parser_parse_node(parser, event, true, false)

	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
		return yaml_parser_parse_node(parser, event, true, true)

	case yaml_PARSE_FLOW_NODE_STATE:
		return yaml_parser_parse_node(parser, event, false, false)

	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, true)

	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, false)

	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_indentless_sequence_entry(parser, event)

	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, true)

	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, false)

	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
		return yaml_parser_parse_block_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, true)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, false)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)

	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, true)

	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, true)

	default:
		panic("invalid parser state")
	}
	// Unreachable: the default case panics.
	return false
}
+
+// Parse the production:
+// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+// ************
+func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_STREAM_START_TOKEN {
+ return yaml_parser_set_parser_error(parser, "did not find expected ", token.start_mark)
+ }
+ parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
+ *event = yaml_event_t{
+ typ: yaml_STREAM_START_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ encoding: token.encoding,
+ }
+ skip_token(parser)
+ return true
+}
+
+// Parse the productions:
+// implicit_document ::= block_node DOCUMENT-END*
+// *
+// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+// *************************
+func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {
+
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+
+ // Parse extra document end indicators.
+ if !implicit {
+ for token.typ == yaml_DOCUMENT_END_TOKEN {
+ skip_token(parser)
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ }
+ }
+
+ if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
+ token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
+ token.typ != yaml_DOCUMENT_START_TOKEN &&
+ token.typ != yaml_STREAM_END_TOKEN {
+ // Parse an implicit document.
+ if !yaml_parser_process_directives(parser, nil, nil) {
+ return false
+ }
+ parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
+ parser.state = yaml_PARSE_BLOCK_NODE_STATE
+
+ *event = yaml_event_t{
+ typ: yaml_DOCUMENT_START_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ }
+
+ } else if token.typ != yaml_STREAM_END_TOKEN {
+ // Parse an explicit document.
+ var version_directive *yaml_version_directive_t
+ var tag_directives []yaml_tag_directive_t
+ start_mark := token.start_mark
+ if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
+ return false
+ }
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_DOCUMENT_START_TOKEN {
+ yaml_parser_set_parser_error(parser,
+ "did not find expected ", token.start_mark)
+ return false
+ }
+ parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
+ parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
+ end_mark := token.end_mark
+
+ *event = yaml_event_t{
+ typ: yaml_DOCUMENT_START_EVENT,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ version_directive: version_directive,
+ tag_directives: tag_directives,
+ implicit: false,
+ }
+ skip_token(parser)
+
+ } else {
+ // Parse the stream end.
+ parser.state = yaml_PARSE_END_STATE
+ *event = yaml_event_t{
+ typ: yaml_STREAM_END_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ }
+ skip_token(parser)
+ }
+
+ return true
+}
+
+// Parse the productions:
+// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+// ***********
+//
+func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
+ token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
+ token.typ == yaml_DOCUMENT_START_TOKEN ||
+ token.typ == yaml_DOCUMENT_END_TOKEN ||
+ token.typ == yaml_STREAM_END_TOKEN {
+ parser.state = parser.states[len(parser.states)-1]
+ parser.states = parser.states[:len(parser.states)-1]
+ return yaml_parser_process_empty_scalar(parser, event,
+ token.start_mark)
+ }
+ return yaml_parser_parse_node(parser, event, true, false)
+}
+
// Parse the productions:
// implicit_document    ::= block_node DOCUMENT-END*
//                                     *************
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//
// Emits a DOCUMENT-END event; implicit when no "..." marker was present.
func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	implicit := true
	if token.typ == yaml_DOCUMENT_END_TOKEN {
		end_mark = token.end_mark
		skip_token(parser)
		implicit = false
	}

	// Tag directives are scoped to a single document.
	parser.tag_directives = parser.tag_directives[:0]

	parser.state = yaml_PARSE_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_DOCUMENT_END_EVENT,
		start_mark: start_mark,
		end_mark:   end_mark,
		implicit:   implicit,
	}
	return true
}
+
+// Parse the productions:
+// block_node_or_indentless_sequence ::=
+// ALIAS
+// *****
+// | properties (block_content | indentless_block_sequence)?
+// ********** *
+// | block_content | indentless_block_sequence
+// *
+// block_node ::= ALIAS
+// *****
+// | properties block_content?
+// ********** *
+// | block_content
+// *
+// flow_node ::= ALIAS
+// *****
+// | properties flow_content?
+// ********** *
+// | flow_content
+// *
+// properties ::= TAG ANCHOR? | ANCHOR TAG?
+// *************************
+// block_content ::= block_collection | flow_collection | SCALAR
+// ******
+// flow_content ::= flow_collection | SCALAR
+// ******
// yaml_parser_parse_node parses a single node (alias, scalar, or the start
// of a collection), optionally preceded by anchor/tag properties.
// block selects the block context; indentless_sequence additionally allows
// a "-" entry at the current indentation.
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
	//defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// An alias is a complete node on its own: emit and return.
	if token.typ == yaml_ALIAS_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		*event = yaml_event_t{
			typ:        yaml_ALIAS_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
			anchor:     token.value,
		}
		skip_token(parser)
		return true
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	// Collect the optional properties: TAG ANCHOR? | ANCHOR TAG?.
	var tag_token bool
	var tag_handle, tag_suffix, anchor []byte
	var tag_mark yaml_mark_t
	if token.typ == yaml_ANCHOR_TOKEN {
		anchor = token.value
		start_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_TAG_TOKEN {
			tag_token = true
			tag_handle = token.value
			tag_suffix = token.suffix
			tag_mark = token.start_mark
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	} else if token.typ == yaml_TAG_TOKEN {
		tag_token = true
		tag_handle = token.value
		tag_suffix = token.suffix
		start_mark = token.start_mark
		tag_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_ANCHOR_TOKEN {
			anchor = token.value
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	// Resolve the tag handle against the document's %TAG directives.
	var tag []byte
	if tag_token {
		if len(tag_handle) == 0 {
			// Verbatim tag: the suffix already holds the full tag.
			tag = tag_suffix
			tag_suffix = nil
		} else {
			for i := range parser.tag_directives {
				if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
					tag = append([]byte(nil), parser.tag_directives[i].prefix...)
					tag = append(tag, tag_suffix...)
					break
				}
			}
			if len(tag) == 0 {
				yaml_parser_set_parser_error_context(parser,
					"while parsing a node", start_mark,
					"found undefined tag handle", tag_mark)
				return false
			}
		}
	}

	implicit := len(tag) == 0
	// An indentless sequence entry at the current indentation.
	if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_SCALAR_TOKEN {
		// plain_implicit: resolver may type the scalar; quoted_implicit:
		// quoted but untagged, resolve as a string.
		var plain_implicit, quoted_implicit bool
		end_mark = token.end_mark
		if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
			plain_implicit = true
		} else if len(tag) == 0 {
			quoted_implicit = true
		}
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			value:           token.value,
			implicit:        plain_implicit,
			quoted_implicit: quoted_implicit,
			style:           yaml_style_t(token.style),
		}
		skip_token(parser)
		return true
	}
	if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
		// [Go] Some of the events below can be merged as they differ only on style.
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
		}
		return true
	}
	// Properties with no content: treat as an empty scalar carrying
	// the anchor/tag.
	if len(anchor) > 0 || len(tag) > 0 {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			implicit:        implicit,
			quoted_implicit: false,
			style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
		}
		return true
	}

	context := "while parsing a flow node"
	if block {
		context = "while parsing a block node"
	}
	yaml_parser_set_parser_error_context(parser, context, start_mark,
		"did not find expected node content", token.start_mark)
	return false
}
+
+// Parse the productions:
+// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+// ******************** *********** * *********
+//
+func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
+ if first {
+ token := peek_token(parser)
+ parser.marks = append(parser.marks, token.start_mark)
+ skip_token(parser)
+ }
+
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+
+ if token.typ == yaml_BLOCK_ENTRY_TOKEN {
+ mark := token.end_mark
+ skip_token(parser)
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
+ parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
+ return yaml_parser_parse_node(parser, event, true, false)
+ } else {
+ parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
+ return yaml_parser_process_empty_scalar(parser, event, mark)
+ }
+ }
+ if token.typ == yaml_BLOCK_END_TOKEN {
+ parser.state = parser.states[len(parser.states)-1]
+ parser.states = parser.states[:len(parser.states)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+
+ *event = yaml_event_t{
+ typ: yaml_SEQUENCE_END_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ }
+
+ skip_token(parser)
+ return true
+ }
+
+ context_mark := parser.marks[len(parser.marks)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+ return yaml_parser_set_parser_error_context(parser,
+ "while parsing a block collection", context_mark,
+ "did not find expected '-' indicator", token.start_mark)
+}
+
// Parse the productions:
// indentless_sequence  ::= (BLOCK-ENTRY block_node?)+
//                           *********** *
//
// Emits the next entry's node, an empty scalar for a missing entry, or
// SEQUENCE-END when the "-" entries stop (no BLOCK-END token exists for
// indentless sequences).
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
			token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		}
		// "-" with no node: stand in an empty scalar.
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
	}
	return true
}
+
+// Parse the productions:
+// block_mapping ::= BLOCK-MAPPING_START
+// *******************
+// ((KEY block_node_or_indentless_sequence?)?
+// *** *
+// (VALUE block_node_or_indentless_sequence?)?)*
+//
+// BLOCK-END
+// *********
+//
+func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
+ if first {
+ token := peek_token(parser)
+ parser.marks = append(parser.marks, token.start_mark)
+ skip_token(parser)
+ }
+
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+
+ if token.typ == yaml_KEY_TOKEN {
+ mark := token.end_mark
+ skip_token(parser)
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_KEY_TOKEN &&
+ token.typ != yaml_VALUE_TOKEN &&
+ token.typ != yaml_BLOCK_END_TOKEN {
+ parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
+ return yaml_parser_parse_node(parser, event, true, true)
+ } else {
+ parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
+ return yaml_parser_process_empty_scalar(parser, event, mark)
+ }
+ } else if token.typ == yaml_BLOCK_END_TOKEN {
+ parser.state = parser.states[len(parser.states)-1]
+ parser.states = parser.states[:len(parser.states)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+ *event = yaml_event_t{
+ typ: yaml_MAPPING_END_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ }
+ skip_token(parser)
+ return true
+ }
+
+ context_mark := parser.marks[len(parser.marks)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+ return yaml_parser_set_parser_error_context(parser,
+ "while parsing a block mapping", context_mark,
+ "did not find expected key", token.start_mark)
+}
+
// Parse the productions:
// block_mapping        ::= BLOCK-MAPPING_START
//
//                          ((KEY block_node_or_indentless_sequence?)?
//
//                          (VALUE block_node_or_indentless_sequence?)?)*
//                           ***** *
//                          BLOCK-END
//
//
// Emits the value node for the current key, or an empty scalar when the
// value (or the whole ":" marker) is missing.
func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		}
		// ":" with no value node: stand in an empty scalar.
		parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
+
+// Parse the productions:
+// flow_sequence ::= FLOW-SEQUENCE-START
+// *******************
+// (flow_sequence_entry FLOW-ENTRY)*
+// * **********
+// flow_sequence_entry?
+// *
+// FLOW-SEQUENCE-END
+// *****************
+// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+// *
+//
+func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
+ if first {
+ token := peek_token(parser)
+ parser.marks = append(parser.marks, token.start_mark)
+ skip_token(parser)
+ }
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
+ if !first {
+ if token.typ == yaml_FLOW_ENTRY_TOKEN {
+ skip_token(parser)
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ } else {
+ context_mark := parser.marks[len(parser.marks)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+ return yaml_parser_set_parser_error_context(parser,
+ "while parsing a flow sequence", context_mark,
+ "did not find expected ',' or ']'", token.start_mark)
+ }
+ }
+
+ if token.typ == yaml_KEY_TOKEN {
+ parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
+ *event = yaml_event_t{
+ typ: yaml_MAPPING_START_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ implicit: true,
+ style: yaml_style_t(yaml_FLOW_MAPPING_STYLE),
+ }
+ skip_token(parser)
+ return true
+ } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
+ parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
+ return yaml_parser_parse_node(parser, event, false, false)
+ }
+ }
+
+ parser.state = parser.states[len(parser.states)-1]
+ parser.states = parser.states[:len(parser.states)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+
+ *event = yaml_event_t{
+ typ: yaml_SEQUENCE_END_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ }
+
+ skip_token(parser)
+ return true
+}
+
+//
+// Parse the productions:
+// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+// *** *
+//
+func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_VALUE_TOKEN &&
+ token.typ != yaml_FLOW_ENTRY_TOKEN &&
+ token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
+ parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
+ return yaml_parser_parse_node(parser, event, false, false)
+ }
+ mark := token.end_mark
+ skip_token(parser)
+ parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
+ return yaml_parser_process_empty_scalar(parser, event, mark)
+}
+
// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                      ***** *
//
// Emits the value node of a single-pair mapping inside a flow sequence,
// or an empty scalar when the value is missing.
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		// NOTE(review): the := deliberately shadows the outer token, so
		// the empty-scalar fallthrough below still uses the VALUE
		// token's start_mark — confirm against upstream before changing.
		token := peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
+
+// Parse the productions:
+// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+// *
+//
+func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+ parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
+ *event = yaml_event_t{
+ typ: yaml_MAPPING_END_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.start_mark, // [Go] Shouldn't this be end_mark?
+ }
+ return true
+}
+
+// Parse the productions:
+// flow_mapping ::= FLOW-MAPPING-START
+// ******************
+// (flow_mapping_entry FLOW-ENTRY)*
+// * **********
+// flow_mapping_entry?
+// ******************
+// FLOW-MAPPING-END
+// ****************
+// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+// * *** *
+//
+func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
+ if first {
+ token := peek_token(parser)
+ parser.marks = append(parser.marks, token.start_mark)
+ skip_token(parser)
+ }
+
+ token := peek_token(parser)
+ if token == nil {
+ return false
+ }
+
+ if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+ if !first {
+ if token.typ == yaml_FLOW_ENTRY_TOKEN {
+ skip_token(parser)
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ } else {
+ context_mark := parser.marks[len(parser.marks)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+ return yaml_parser_set_parser_error_context(parser,
+ "while parsing a flow mapping", context_mark,
+ "did not find expected ',' or '}'", token.start_mark)
+ }
+ }
+
+ if token.typ == yaml_KEY_TOKEN {
+ skip_token(parser)
+ token = peek_token(parser)
+ if token == nil {
+ return false
+ }
+ if token.typ != yaml_VALUE_TOKEN &&
+ token.typ != yaml_FLOW_ENTRY_TOKEN &&
+ token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+ parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
+ return yaml_parser_parse_node(parser, event, false, false)
+ } else {
+ parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
+ return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
+ }
+ } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+ parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
+ return yaml_parser_parse_node(parser, event, false, false)
+ }
+ }
+
+ parser.state = parser.states[len(parser.states)-1]
+ parser.states = parser.states[:len(parser.states)-1]
+ parser.marks = parser.marks[:len(parser.marks)-1]
+ *event = yaml_event_t{
+ typ: yaml_MAPPING_END_EVENT,
+ start_mark: token.start_mark,
+ end_mark: token.end_mark,
+ }
+ skip_token(parser)
+ return true
+}
+
// Parse the productions:
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                   *                  ***** *
//
// Emits the value node for the current key; with empty set, or when no
// ":" follows, stands in an empty scalar instead.
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if empty {
		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	// ":" with no value node (or no ":" at all): empty scalar.
	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
+
+// Generate an empty scalar event.
+func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
+ *event = yaml_event_t{
+ typ: yaml_SCALAR_EVENT,
+ start_mark: mark,
+ end_mark: mark,
+ value: nil, // Empty
+ implicit: true,
+ style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
+ }
+ return true
+}
+
// default_tag_directives are the two handles every document resolves by
// default: "!" for local tags and "!!" for the YAML core schema namespace.
var default_tag_directives = []yaml_tag_directive_t{
	{[]byte("!"), []byte("!")},
	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
}
+
// Parse directives.
//
// Consumes any %YAML and %TAG directives preceding a document, validating
// them and registering tag directives on the parser. On success the
// collected version/tag directives are written through the optional refs.
func yaml_parser_process_directives(parser *yaml_parser_t,
	version_directive_ref **yaml_version_directive_t,
	tag_directives_ref *[]yaml_tag_directive_t) bool {

	var version_directive *yaml_version_directive_t
	var tag_directives []yaml_tag_directive_t

	token := peek_token(parser)
	if token == nil {
		return false
	}

	for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
		if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
			// At most one %YAML directive per document, and only 1.1
			// is accepted.
			if version_directive != nil {
				yaml_parser_set_parser_error(parser,
					"found duplicate %YAML directive", token.start_mark)
				return false
			}
			if token.major != 1 || token.minor != 1 {
				yaml_parser_set_parser_error(parser,
					"found incompatible YAML document", token.start_mark)
				return false
			}
			version_directive = &yaml_version_directive_t{
				major: token.major,
				minor: token.minor,
			}
		} else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
			value := yaml_tag_directive_t{
				handle: token.value,
				prefix: token.prefix,
			}
			if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
				return false
			}
			tag_directives = append(tag_directives, value)
		}

		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
	}

	// Register the defaults ("!" and "!!"); duplicates of explicit
	// directives are silently allowed here.
	for i := range default_tag_directives {
		if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
			return false
		}
	}

	if version_directive_ref != nil {
		*version_directive_ref = version_directive
	}
	if tag_directives_ref != nil {
		*tag_directives_ref = tag_directives
	}
	return true
}
+
+// Append a tag directive to the directives stack.
+func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
+ for i := range parser.tag_directives {
+ if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
+ if allow_duplicates {
+ return true
+ }
+ return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
+ }
+ }
+
+ // [Go] I suspect the copy is unnecessary. This was likely done
+ // because there was no way to track ownership of the data.
+ value_copy := yaml_tag_directive_t{
+ handle: make([]byte, len(value.handle)),
+ prefix: make([]byte, len(value.prefix)),
+ }
+ copy(value_copy.handle, value.handle)
+ copy(value_copy.prefix, value.prefix)
+ parser.tag_directives = append(parser.tag_directives, value_copy)
+ return true
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/readerc.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/readerc.go
new file mode 100644
index 00000000..d5fb0972
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/readerc.go
@@ -0,0 +1,391 @@
+package yaml
+
+import (
+ "io"
+)
+
+// Set the reader error and return 0.
+func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
+ parser.error = yaml_READER_ERROR
+ parser.problem = problem
+ parser.problem_offset = offset
+ parser.problem_value = value
+ return false
+}
+
+// Byte order marks.
+const (
+ bom_UTF8 = "\xef\xbb\xbf"
+ bom_UTF16LE = "\xff\xfe"
+ bom_UTF16BE = "\xfe\xff"
+)
+
+// Determine the input stream encoding by checking the BOM symbol. If no BOM is
+// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure.
+func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
+ // Ensure that we had enough bytes in the raw buffer.
+ for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
+ if !yaml_parser_update_raw_buffer(parser) {
+ return false
+ }
+ }
+
+ // Determine the encoding.
+ buf := parser.raw_buffer
+ pos := parser.raw_buffer_pos
+ avail := len(buf) - pos
+ if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
+ parser.encoding = yaml_UTF16LE_ENCODING
+ parser.raw_buffer_pos += 2
+ parser.offset += 2
+ } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
+ parser.encoding = yaml_UTF16BE_ENCODING
+ parser.raw_buffer_pos += 2
+ parser.offset += 2
+ } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
+ parser.encoding = yaml_UTF8_ENCODING
+ parser.raw_buffer_pos += 3
+ parser.offset += 3
+ } else {
+ parser.encoding = yaml_UTF8_ENCODING
+ }
+ return true
+}
+
+// Update the raw buffer.
+func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
+ size_read := 0
+
+ // Return if the raw buffer is full.
+ if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
+ return true
+ }
+
+ // Return on EOF.
+ if parser.eof {
+ return true
+ }
+
+ // Move the remaining bytes in the raw buffer to the beginning.
+ if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
+ copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
+ }
+ parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
+ parser.raw_buffer_pos = 0
+
+ // Call the read handler to fill the buffer.
+ size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
+ parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
+ if err == io.EOF {
+ parser.eof = true
+ } else if err != nil {
+ return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
+ }
+ return true
+}
+
+// Ensure that the buffer contains at least `length` characters.
+// Return true on success, false on failure.
+//
+// The length is supposed to be significantly less that the buffer size.
+func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
+ if parser.read_handler == nil {
+ panic("read handler must be set")
+ }
+
+ // If the EOF flag is set and the raw buffer is empty, do nothing.
+ if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
+ return true
+ }
+
+ // Return if the buffer contains enough characters.
+ if parser.unread >= length {
+ return true
+ }
+
+ // Determine the input encoding if it is not known yet.
+ if parser.encoding == yaml_ANY_ENCODING {
+ if !yaml_parser_determine_encoding(parser) {
+ return false
+ }
+ }
+
+ // Move the unread characters to the beginning of the buffer.
+ buffer_len := len(parser.buffer)
+ if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
+ copy(parser.buffer, parser.buffer[parser.buffer_pos:])
+ buffer_len -= parser.buffer_pos
+ parser.buffer_pos = 0
+ } else if parser.buffer_pos == buffer_len {
+ buffer_len = 0
+ parser.buffer_pos = 0
+ }
+
+ // Open the whole buffer for writing, and cut it before returning.
+ parser.buffer = parser.buffer[:cap(parser.buffer)]
+
+ // Fill the buffer until it has enough characters.
+ first := true
+ for parser.unread < length {
+
+ // Fill the raw buffer if necessary.
+ if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
+ if !yaml_parser_update_raw_buffer(parser) {
+ parser.buffer = parser.buffer[:buffer_len]
+ return false
+ }
+ }
+ first = false
+
+ // Decode the raw buffer.
+ inner:
+ for parser.raw_buffer_pos != len(parser.raw_buffer) {
+ var value rune
+ var width int
+
+ raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos
+
+ // Decode the next character.
+ switch parser.encoding {
+ case yaml_UTF8_ENCODING:
+ // Decode a UTF-8 character. Check RFC 3629
+ // (http://www.ietf.org/rfc/rfc3629.txt) for more details.
+ //
+ // The following table (taken from the RFC) is used for
+ // decoding.
+ //
+ // Char. number range | UTF-8 octet sequence
+ // (hexadecimal) | (binary)
+ // --------------------+------------------------------------
+ // 0000 0000-0000 007F | 0xxxxxxx
+ // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
+ // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
+ // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
+ //
+ // Additionally, the characters in the range 0xD800-0xDFFF
+ // are prohibited as they are reserved for use with UTF-16
+ // surrogate pairs.
+
+ // Determine the length of the UTF-8 sequence.
+ octet := parser.raw_buffer[parser.raw_buffer_pos]
+ switch {
+ case octet&0x80 == 0x00:
+ width = 1
+ case octet&0xE0 == 0xC0:
+ width = 2
+ case octet&0xF0 == 0xE0:
+ width = 3
+ case octet&0xF8 == 0xF0:
+ width = 4
+ default:
+ // The leading octet is invalid.
+ return yaml_parser_set_reader_error(parser,
+ "invalid leading UTF-8 octet",
+ parser.offset, int(octet))
+ }
+
+ // Check if the raw buffer contains an incomplete character.
+ if width > raw_unread {
+ if parser.eof {
+ return yaml_parser_set_reader_error(parser,
+ "incomplete UTF-8 octet sequence",
+ parser.offset, -1)
+ }
+ break inner
+ }
+
+ // Decode the leading octet.
+ switch {
+ case octet&0x80 == 0x00:
+ value = rune(octet & 0x7F)
+ case octet&0xE0 == 0xC0:
+ value = rune(octet & 0x1F)
+ case octet&0xF0 == 0xE0:
+ value = rune(octet & 0x0F)
+ case octet&0xF8 == 0xF0:
+ value = rune(octet & 0x07)
+ default:
+ value = 0
+ }
+
+ // Check and decode the trailing octets.
+ for k := 1; k < width; k++ {
+ octet = parser.raw_buffer[parser.raw_buffer_pos+k]
+
+ // Check if the octet is valid.
+ if (octet & 0xC0) != 0x80 {
+ return yaml_parser_set_reader_error(parser,
+ "invalid trailing UTF-8 octet",
+ parser.offset+k, int(octet))
+ }
+
+ // Decode the octet.
+ value = (value << 6) + rune(octet&0x3F)
+ }
+
+ // Check the length of the sequence against the value.
+ switch {
+ case width == 1:
+ case width == 2 && value >= 0x80:
+ case width == 3 && value >= 0x800:
+ case width == 4 && value >= 0x10000:
+ default:
+ return yaml_parser_set_reader_error(parser,
+ "invalid length of a UTF-8 sequence",
+ parser.offset, -1)
+ }
+
+ // Check the range of the value.
+ if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
+ return yaml_parser_set_reader_error(parser,
+ "invalid Unicode character",
+ parser.offset, int(value))
+ }
+
+ case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
+ var low, high int
+ if parser.encoding == yaml_UTF16LE_ENCODING {
+ low, high = 0, 1
+ } else {
+ high, low = 1, 0
+ }
+
+ // The UTF-16 encoding is not as simple as one might
+ // naively think. Check RFC 2781
+ // (http://www.ietf.org/rfc/rfc2781.txt).
+ //
+ // Normally, two subsequent bytes describe a Unicode
+ // character. However a special technique (called a
+ // surrogate pair) is used for specifying character
+ // values larger than 0xFFFF.
+ //
+ // A surrogate pair consists of two pseudo-characters:
+ // high surrogate area (0xD800-0xDBFF)
+ // low surrogate area (0xDC00-0xDFFF)
+ //
+ // The following formulas are used for decoding
+ // and encoding characters using surrogate pairs:
+ //
+ // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF)
+ // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF)
+ // W1 = 110110yyyyyyyyyy
+ // W2 = 110111xxxxxxxxxx
+ //
+ // where U is the character value, W1 is the high surrogate
+ // area, W2 is the low surrogate area.
+
+ // Check for incomplete UTF-16 character.
+ if raw_unread < 2 {
+ if parser.eof {
+ return yaml_parser_set_reader_error(parser,
+ "incomplete UTF-16 character",
+ parser.offset, -1)
+ }
+ break inner
+ }
+
+ // Get the character.
+ value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
+ (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)
+
+ // Check for unexpected low surrogate area.
+ if value&0xFC00 == 0xDC00 {
+ return yaml_parser_set_reader_error(parser,
+ "unexpected low surrogate area",
+ parser.offset, int(value))
+ }
+
+ // Check for a high surrogate area.
+ if value&0xFC00 == 0xD800 {
+ width = 4
+
+ // Check for incomplete surrogate pair.
+ if raw_unread < 4 {
+ if parser.eof {
+ return yaml_parser_set_reader_error(parser,
+ "incomplete UTF-16 surrogate pair",
+ parser.offset, -1)
+ }
+ break inner
+ }
+
+ // Get the next character.
+ value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
+ (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)
+
+ // Check for a low surrogate area.
+ if value2&0xFC00 != 0xDC00 {
+ return yaml_parser_set_reader_error(parser,
+ "expected low surrogate area",
+ parser.offset+2, int(value2))
+ }
+
+ // Generate the value of the surrogate pair.
+ value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
+ } else {
+ width = 2
+ }
+
+ default:
+ panic("impossible")
+ }
+
+ // Check if the character is in the allowed range:
+ // #x9 | #xA | #xD | [#x20-#x7E] (8 bit)
+ // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit)
+ // | [#x10000-#x10FFFF] (32 bit)
+ switch {
+ case value == 0x09:
+ case value == 0x0A:
+ case value == 0x0D:
+ case value >= 0x20 && value <= 0x7E:
+ case value == 0x85:
+ case value >= 0xA0 && value <= 0xD7FF:
+ case value >= 0xE000 && value <= 0xFFFD:
+ case value >= 0x10000 && value <= 0x10FFFF:
+ default:
+ return yaml_parser_set_reader_error(parser,
+ "control characters are not allowed",
+ parser.offset, int(value))
+ }
+
+ // Move the raw pointers.
+ parser.raw_buffer_pos += width
+ parser.offset += width
+
+ // Finally put the character into the buffer.
+ if value <= 0x7F {
+ // 0000 0000-0000 007F . 0xxxxxxx
+ parser.buffer[buffer_len+0] = byte(value)
+ } else if value <= 0x7FF {
+ // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
+ parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
+ parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
+ } else if value <= 0xFFFF {
+ // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
+ parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
+ parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
+ parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
+ } else {
+ // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
+ parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
+ parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
+ parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
+ parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
+ }
+ buffer_len += width
+
+ parser.unread++
+ }
+
+ // On EOF, put NUL into the buffer and return.
+ if parser.eof {
+ parser.buffer[buffer_len] = 0
+ buffer_len++
+ parser.unread++
+ break
+ }
+ }
+ parser.buffer = parser.buffer[:buffer_len]
+ return true
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/resolve.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/resolve.go
new file mode 100644
index 00000000..06c698a2
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/resolve.go
@@ -0,0 +1,190 @@
+package yaml
+
+import (
+ "encoding/base64"
+ "fmt"
+ "math"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// TODO: merge, timestamps, base 60 floats, omap.
+
+type resolveMapItem struct {
+ value interface{}
+ tag string
+}
+
+var resolveTable = make([]byte, 256)
+var resolveMap = make(map[string]resolveMapItem)
+
+func init() {
+ t := resolveTable
+ t[int('+')] = 'S' // Sign
+ t[int('-')] = 'S'
+ for _, c := range "0123456789" {
+ t[int(c)] = 'D' // Digit
+ }
+ for _, c := range "yYnNtTfFoO~" {
+ t[int(c)] = 'M' // In map
+ }
+ t[int('.')] = '.' // Float (potentially in map)
+
+ var resolveMapList = []struct {
+ v interface{}
+ tag string
+ l []string
+ }{
+ {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
+ {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
+ {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
+ {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
+ {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
+ {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
+ {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
+ {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
+ {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
+ {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
+ {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
+ {"<<", yaml_MERGE_TAG, []string{"<<"}},
+ }
+
+ m := resolveMap
+ for _, item := range resolveMapList {
+ for _, s := range item.l {
+ m[s] = resolveMapItem{item.v, item.tag}
+ }
+ }
+}
+
+const longTagPrefix = "tag:yaml.org,2002:"
+
+func shortTag(tag string) string {
+ // TODO This can easily be made faster and produce less garbage.
+ if strings.HasPrefix(tag, longTagPrefix) {
+ return "!!" + tag[len(longTagPrefix):]
+ }
+ return tag
+}
+
+func longTag(tag string) string {
+ if strings.HasPrefix(tag, "!!") {
+ return longTagPrefix + tag[2:]
+ }
+ return tag
+}
+
+func resolvableTag(tag string) bool {
+ switch tag {
+ case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
+ return true
+ }
+ return false
+}
+
+func resolve(tag string, in string) (rtag string, out interface{}) {
+ if !resolvableTag(tag) {
+ return tag, in
+ }
+
+ defer func() {
+ switch tag {
+ case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
+ return
+ }
+ fail(fmt.Sprintf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)))
+ }()
+
+ // Any data is accepted as a !!str or !!binary.
+ // Otherwise, the prefix is enough of a hint about what it might be.
+ hint := byte('N')
+ if in != "" {
+ hint = resolveTable[in[0]]
+ }
+ if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
+ // Handle things we can lookup in a map.
+ if item, ok := resolveMap[in]; ok {
+ return item.tag, item.value
+ }
+
+ // Base 60 floats are a bad idea, were dropped in YAML 1.2, and
+ // are purposefully unsupported here. They're still quoted on
+	// the way out for compatibility with other parsers, though.
+
+ switch hint {
+ case 'M':
+ // We've already checked the map above.
+
+ case '.':
+ // Not in the map, so maybe a normal float.
+ floatv, err := strconv.ParseFloat(in, 64)
+ if err == nil {
+ return yaml_FLOAT_TAG, floatv
+ }
+
+ case 'D', 'S':
+ // Int, float, or timestamp.
+ plain := strings.Replace(in, "_", "", -1)
+ intv, err := strconv.ParseInt(plain, 0, 64)
+ if err == nil {
+ if intv == int64(int(intv)) {
+ return yaml_INT_TAG, int(intv)
+ } else {
+ return yaml_INT_TAG, intv
+ }
+ }
+ floatv, err := strconv.ParseFloat(plain, 64)
+ if err == nil {
+ return yaml_FLOAT_TAG, floatv
+ }
+ if strings.HasPrefix(plain, "0b") {
+ intv, err := strconv.ParseInt(plain[2:], 2, 64)
+ if err == nil {
+ return yaml_INT_TAG, int(intv)
+ }
+ } else if strings.HasPrefix(plain, "-0b") {
+ intv, err := strconv.ParseInt(plain[3:], 2, 64)
+ if err == nil {
+ return yaml_INT_TAG, -int(intv)
+ }
+ }
+ // XXX Handle timestamps here.
+
+ default:
+ panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
+ }
+ }
+ if tag == yaml_BINARY_TAG {
+ return yaml_BINARY_TAG, in
+ }
+ if utf8.ValidString(in) {
+ return yaml_STR_TAG, in
+ }
+ return yaml_BINARY_TAG, encodeBase64(in)
+}
+
+// encodeBase64 encodes s as base64 that is broken up into multiple lines
+// as appropriate for the resulting length.
+func encodeBase64(s string) string {
+ const lineLen = 70
+ encLen := base64.StdEncoding.EncodedLen(len(s))
+ lines := encLen/lineLen + 1
+ buf := make([]byte, encLen*2+lines)
+ in := buf[0:encLen]
+ out := buf[encLen:]
+ base64.StdEncoding.Encode(in, []byte(s))
+ k := 0
+ for i := 0; i < len(in); i += lineLen {
+ j := i + lineLen
+ if j > len(in) {
+ j = len(in)
+ }
+ k += copy(out[k:], in[i:j])
+ if lines > 1 {
+ out[k] = '\n'
+ k++
+ }
+ }
+ return string(out[:k])
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/scannerc.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/scannerc.go
new file mode 100644
index 00000000..fe93b190
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/scannerc.go
@@ -0,0 +1,2710 @@
+package yaml
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// Introduction
+// ************
+//
+// The following notes assume that you are familiar with the YAML specification
+// (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in
+// some cases we are less restrictive than it requires.
+//
+// The process of transforming a YAML stream into a sequence of events is
+// divided into two steps: Scanning and Parsing.
+//
+// The Scanner transforms the input stream into a sequence of tokens, while the
+// parser transforms the sequence of tokens produced by the Scanner into a
+// sequence of parsing events.
+//
+// The Scanner is rather clever and complicated. The Parser, on the contrary,
+// is a straightforward implementation of a recursive-descendant parser (or,
+// LL(1) parser, as it is usually called).
+//
+// Actually there are two issues of Scanning that might be called "clever", the
+// rest is quite straightforward. The issues are "block collection start" and
+// "simple keys". Both issues are explained below in details.
+//
+// Here the Scanning step is explained and implemented. We start with the list
+// of all the tokens produced by the Scanner together with short descriptions.
+//
+// Now, tokens:
+//
+// STREAM-START(encoding) # The stream start.
+// STREAM-END # The stream end.
+// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive.
+// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive.
+// DOCUMENT-START # '---'
+// DOCUMENT-END # '...'
+// BLOCK-SEQUENCE-START # Indentation increase denoting a block
+// BLOCK-MAPPING-START # sequence or a block mapping.
+// BLOCK-END # Indentation decrease.
+// FLOW-SEQUENCE-START # '['
+// FLOW-SEQUENCE-END # ']'
+// FLOW-MAPPING-START # '{'
+// FLOW-MAPPING-END # '}'
+// BLOCK-ENTRY # '-'
+// FLOW-ENTRY # ','
+// KEY # '?' or nothing (simple keys).
+// VALUE # ':'
+// ALIAS(anchor) # '*anchor'
+// ANCHOR(anchor) # '&anchor'
+// TAG(handle,suffix) # '!handle!suffix'
+// SCALAR(value,style) # A scalar.
+//
+// The following two tokens are "virtual" tokens denoting the beginning and the
+// end of the stream:
+//
+// STREAM-START(encoding)
+// STREAM-END
+//
+// We pass the information about the input stream encoding with the
+// STREAM-START token.
+//
+// The next two tokens are responsible for tags:
+//
+// VERSION-DIRECTIVE(major,minor)
+// TAG-DIRECTIVE(handle,prefix)
+//
+// Example:
+//
+// %YAML 1.1
+// %TAG ! !foo
+// %TAG !yaml! tag:yaml.org,2002:
+// ---
+//
+// The corresponding sequence of tokens:
+//
+// STREAM-START(utf-8)
+// VERSION-DIRECTIVE(1,1)
+// TAG-DIRECTIVE("!","!foo")
+// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:")
+// DOCUMENT-START
+// STREAM-END
+//
+// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole
+// line.
+//
+// The document start and end indicators are represented by:
+//
+// DOCUMENT-START
+// DOCUMENT-END
+//
+// Note that if a YAML stream contains an implicit document (without '---'
+// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be
+// produced.
+//
+// In the following examples, we present whole documents together with the
+// produced tokens.
+//
+// 1. An implicit document:
+//
+// 'a scalar'
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// SCALAR("a scalar",single-quoted)
+// STREAM-END
+//
+// 2. An explicit document:
+//
+// ---
+// 'a scalar'
+// ...
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// DOCUMENT-START
+// SCALAR("a scalar",single-quoted)
+// DOCUMENT-END
+// STREAM-END
+//
+// 3. Several documents in a stream:
+//
+// 'a scalar'
+// ---
+// 'another scalar'
+// ---
+// 'yet another scalar'
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// SCALAR("a scalar",single-quoted)
+// DOCUMENT-START
+// SCALAR("another scalar",single-quoted)
+// DOCUMENT-START
+// SCALAR("yet another scalar",single-quoted)
+// STREAM-END
+//
+// We have already introduced the SCALAR token above. The following tokens are
+// used to describe aliases, anchors, tag, and scalars:
+//
+// ALIAS(anchor)
+// ANCHOR(anchor)
+// TAG(handle,suffix)
+// SCALAR(value,style)
+//
+// The following series of examples illustrate the usage of these tokens:
+//
+// 1. A recursive sequence:
+//
+// &A [ *A ]
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// ANCHOR("A")
+// FLOW-SEQUENCE-START
+// ALIAS("A")
+// FLOW-SEQUENCE-END
+// STREAM-END
+//
+// 2. A tagged scalar:
+//
+// !!float "3.14" # A good approximation.
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// TAG("!!","float")
+// SCALAR("3.14",double-quoted)
+// STREAM-END
+//
+// 3. Various scalar styles:
+//
+// --- # Implicit empty plain scalars do not produce tokens.
+// --- a plain scalar
+// --- 'a single-quoted scalar'
+// --- "a double-quoted scalar"
+// --- |-
+// a literal scalar
+// --- >-
+// a folded
+// scalar
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// DOCUMENT-START
+// DOCUMENT-START
+// SCALAR("a plain scalar",plain)
+// DOCUMENT-START
+// SCALAR("a single-quoted scalar",single-quoted)
+// DOCUMENT-START
+// SCALAR("a double-quoted scalar",double-quoted)
+// DOCUMENT-START
+// SCALAR("a literal scalar",literal)
+// DOCUMENT-START
+// SCALAR("a folded scalar",folded)
+// STREAM-END
+//
+// Now it's time to review collection-related tokens. We will start with
+// flow collections:
+//
+// FLOW-SEQUENCE-START
+// FLOW-SEQUENCE-END
+// FLOW-MAPPING-START
+// FLOW-MAPPING-END
+// FLOW-ENTRY
+// KEY
+// VALUE
+//
+// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and
+// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}'
+// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the
+// indicators '?' and ':', which are used for denoting mapping keys and values,
+// are represented by the KEY and VALUE tokens.
+//
+// The following examples show flow collections:
+//
+// 1. A flow sequence:
+//
+// [item 1, item 2, item 3]
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// FLOW-SEQUENCE-START
+// SCALAR("item 1",plain)
+// FLOW-ENTRY
+// SCALAR("item 2",plain)
+// FLOW-ENTRY
+// SCALAR("item 3",plain)
+// FLOW-SEQUENCE-END
+// STREAM-END
+//
+// 2. A flow mapping:
+//
+// {
+// a simple key: a value, # Note that the KEY token is produced.
+// ? a complex key: another value,
+// }
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// FLOW-MAPPING-START
+// KEY
+// SCALAR("a simple key",plain)
+// VALUE
+// SCALAR("a value",plain)
+// FLOW-ENTRY
+// KEY
+// SCALAR("a complex key",plain)
+// VALUE
+// SCALAR("another value",plain)
+// FLOW-ENTRY
+// FLOW-MAPPING-END
+// STREAM-END
+//
+// A simple key is a key which is not denoted by the '?' indicator. Note that
+// the Scanner still produces the KEY token whenever it encounters a simple key.
+//
+// For scanning block collections, the following tokens are used (note that we
+// repeat KEY and VALUE here):
+//
+// BLOCK-SEQUENCE-START
+// BLOCK-MAPPING-START
+// BLOCK-END
+// BLOCK-ENTRY
+// KEY
+// VALUE
+//
+// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation
+// increase that precedes a block collection (cf. the INDENT token in Python).
+// The token BLOCK-END denotes an indentation decrease that ends a block
+// collection (cf. the DEDENT token in Python). However YAML has some syntax
+// peculiarities that make detection of these tokens more complex.
+//
+// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators
+// '-', '?', and ':' correspondingly.
+//
+// The following examples show how the tokens BLOCK-SEQUENCE-START,
+// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner:
+//
+// 1. Block sequences:
+//
+// - item 1
+// - item 2
+// -
+// - item 3.1
+// - item 3.2
+// -
+// key 1: value 1
+// key 2: value 2
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// BLOCK-SEQUENCE-START
+// BLOCK-ENTRY
+// SCALAR("item 1",plain)
+// BLOCK-ENTRY
+// SCALAR("item 2",plain)
+// BLOCK-ENTRY
+// BLOCK-SEQUENCE-START
+// BLOCK-ENTRY
+// SCALAR("item 3.1",plain)
+// BLOCK-ENTRY
+// SCALAR("item 3.2",plain)
+// BLOCK-END
+// BLOCK-ENTRY
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("key 1",plain)
+// VALUE
+// SCALAR("value 1",plain)
+// KEY
+// SCALAR("key 2",plain)
+// VALUE
+// SCALAR("value 2",plain)
+// BLOCK-END
+// BLOCK-END
+// STREAM-END
+//
+// 2. Block mappings:
+//
+// a simple key: a value # The KEY token is produced here.
+// ? a complex key
+// : another value
+// a mapping:
+// key 1: value 1
+// key 2: value 2
+// a sequence:
+// - item 1
+// - item 2
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("a simple key",plain)
+// VALUE
+// SCALAR("a value",plain)
+// KEY
+// SCALAR("a complex key",plain)
+// VALUE
+// SCALAR("another value",plain)
+// KEY
+// SCALAR("a mapping",plain)
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("key 1",plain)
+// VALUE
+// SCALAR("value 1",plain)
+// KEY
+// SCALAR("key 2",plain)
+// VALUE
+// SCALAR("value 2",plain)
+// BLOCK-END
+// KEY
+// SCALAR("a sequence",plain)
+// VALUE
+// BLOCK-SEQUENCE-START
+// BLOCK-ENTRY
+// SCALAR("item 1",plain)
+// BLOCK-ENTRY
+// SCALAR("item 2",plain)
+// BLOCK-END
+// BLOCK-END
+// STREAM-END
+//
+// YAML does not always require to start a new block collection from a new
+// line. If the current line contains only '-', '?', and ':' indicators, a new
+// block collection may start at the current line. The following examples
+// illustrate this case:
+//
+// 1. Collections in a sequence:
+//
+// - - item 1
+// - item 2
+// - key 1: value 1
+// key 2: value 2
+// - ? complex key
+// : complex value
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// BLOCK-SEQUENCE-START
+// BLOCK-ENTRY
+// BLOCK-SEQUENCE-START
+// BLOCK-ENTRY
+// SCALAR("item 1",plain)
+// BLOCK-ENTRY
+// SCALAR("item 2",plain)
+// BLOCK-END
+// BLOCK-ENTRY
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("key 1",plain)
+// VALUE
+// SCALAR("value 1",plain)
+// KEY
+// SCALAR("key 2",plain)
+// VALUE
+// SCALAR("value 2",plain)
+// BLOCK-END
+// BLOCK-ENTRY
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("complex key")
+// VALUE
+// SCALAR("complex value")
+// BLOCK-END
+// BLOCK-END
+// STREAM-END
+//
+// 2. Collections in a mapping:
+//
+// ? a sequence
+// : - item 1
+// - item 2
+// ? a mapping
+// : key 1: value 1
+// key 2: value 2
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("a sequence",plain)
+// VALUE
+// BLOCK-SEQUENCE-START
+// BLOCK-ENTRY
+// SCALAR("item 1",plain)
+// BLOCK-ENTRY
+// SCALAR("item 2",plain)
+// BLOCK-END
+// KEY
+// SCALAR("a mapping",plain)
+// VALUE
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("key 1",plain)
+// VALUE
+// SCALAR("value 1",plain)
+// KEY
+// SCALAR("key 2",plain)
+// VALUE
+// SCALAR("value 2",plain)
+// BLOCK-END
+// BLOCK-END
+// STREAM-END
+//
+// YAML also permits non-indented sequences if they are included into a block
+// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced:
+//
+// key:
+// - item 1 # BLOCK-SEQUENCE-START is NOT produced here.
+// - item 2
+//
+// Tokens:
+//
+// STREAM-START(utf-8)
+// BLOCK-MAPPING-START
+// KEY
+// SCALAR("key",plain)
+// VALUE
+// BLOCK-ENTRY
+// SCALAR("item 1",plain)
+// BLOCK-ENTRY
+// SCALAR("item 2",plain)
+// BLOCK-END
+//
+
+// Ensure that the buffer contains the required number of characters.
+// Return true on success, false on failure (reader error or memory error).
+func cache(parser *yaml_parser_t, length int) bool {
+ // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B)
+ return parser.unread >= length || yaml_parser_update_buffer(parser, length)
+}
+
+// Advance the buffer pointer.
+func skip(parser *yaml_parser_t) {
+ parser.mark.index++
+ parser.mark.column++
+ parser.unread--
+ parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
+}
+
+func skip_line(parser *yaml_parser_t) {
+ if is_crlf(parser.buffer, parser.buffer_pos) {
+ parser.mark.index += 2
+ parser.mark.column = 0
+ parser.mark.line++
+ parser.unread -= 2
+ parser.buffer_pos += 2
+ } else if is_break(parser.buffer, parser.buffer_pos) {
+ parser.mark.index++
+ parser.mark.column = 0
+ parser.mark.line++
+ parser.unread--
+ parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
+ }
+}
+
+// Copy a character to a string buffer and advance pointers.
+func read(parser *yaml_parser_t, s []byte) []byte {
+ w := width(parser.buffer[parser.buffer_pos])
+ if w == 0 {
+ panic("invalid character sequence")
+ }
+ if len(s) == 0 {
+ s = make([]byte, 0, 32)
+ }
+ if w == 1 && len(s)+w <= cap(s) {
+ s = s[:len(s)+1]
+ s[len(s)-1] = parser.buffer[parser.buffer_pos]
+ parser.buffer_pos++
+ } else {
+ s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...)
+ parser.buffer_pos += w
+ }
+ parser.mark.index++
+ parser.mark.column++
+ parser.unread--
+ return s
+}
+
+// Copy a line break character to a string buffer and advance pointers.
+func read_line(parser *yaml_parser_t, s []byte) []byte {
+ buf := parser.buffer
+ pos := parser.buffer_pos
+ switch {
+ case buf[pos] == '\r' && buf[pos+1] == '\n':
+ // CR LF . LF
+ s = append(s, '\n')
+ parser.buffer_pos += 2
+ parser.mark.index++
+ parser.unread--
+ case buf[pos] == '\r' || buf[pos] == '\n':
+ // CR|LF . LF
+ s = append(s, '\n')
+ parser.buffer_pos += 1
+ case buf[pos] == '\xC2' && buf[pos+1] == '\x85':
+ // NEL . LF
+ s = append(s, '\n')
+ parser.buffer_pos += 2
+ case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'):
+ // LS|PS . LS|PS
+ s = append(s, buf[parser.buffer_pos:pos+3]...)
+ parser.buffer_pos += 3
+ default:
+ return s
+ }
+ parser.mark.index++
+ parser.mark.column = 0
+ parser.mark.line++
+ parser.unread--
+ return s
+}
+
+// Get the next token.
+func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool {
+ // Erase the token object.
+ *token = yaml_token_t{} // [Go] Is this necessary?
+
+ // No tokens after STREAM-END or error.
+ if parser.stream_end_produced || parser.error != yaml_NO_ERROR {
+ return true
+ }
+
+ // Ensure that the tokens queue contains enough tokens.
+ if !parser.token_available {
+ if !yaml_parser_fetch_more_tokens(parser) {
+ return false
+ }
+ }
+
+ // Fetch the next token from the queue.
+ *token = parser.tokens[parser.tokens_head]
+ parser.tokens_head++
+ parser.tokens_parsed++
+ parser.token_available = false
+
+ if token.typ == yaml_STREAM_END_TOKEN {
+ parser.stream_end_produced = true
+ }
+ return true
+}
+
+// Set the scanner error and return false.
+func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool {
+ parser.error = yaml_SCANNER_ERROR
+ parser.context = context
+ parser.context_mark = context_mark
+ parser.problem = problem
+ parser.problem_mark = parser.mark
+ return false
+}
+// Set a tag-related scanner error and return false. The context message
+// depends on whether a %TAG directive or a plain tag was being parsed.
+func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool {
+	context := "while parsing a tag"
+	if directive {
+		context = "while parsing a %TAG directive"
+	}
+	return yaml_parser_set_scanner_error(parser, context, context_mark, problem)
+}
+
+// trace prints "+++ <args>" immediately and returns a closure that prints
+// "--- <args>" when invoked; intended for ad-hoc debugging via defer.
+func trace(args ...interface{}) func() {
+	enter := append([]interface{}{"+++"}, args...)
+	fmt.Println(enter...)
+	leave := append([]interface{}{"---"}, args...)
+	return func() { fmt.Println(leave...) }
+}
+
+// Ensure that the tokens queue contains at least one token which can be
+// returned to the Parser.
+//
+// Keeps fetching while the queue is empty or while a still-possible simple
+// key could occupy the queue head; sets token_available on success.
+func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
+	// While we need more tokens to fetch, do it.
+	for {
+		// Check if we really need to fetch more tokens.
+		need_more_tokens := false
+
+		if parser.tokens_head == len(parser.tokens) {
+			// Queue is empty.
+			need_more_tokens = true
+		} else {
+			// Check if any potential simple key may occupy the head position.
+			if !yaml_parser_stale_simple_keys(parser) {
+				return false
+			}
+
+			// A pending simple key pointing at the next token to be parsed
+			// forces another fetch so its matching ':' can be found.
+			for i := range parser.simple_keys {
+				simple_key := &parser.simple_keys[i]
+				if simple_key.possible && simple_key.token_number == parser.tokens_parsed {
+					need_more_tokens = true
+					break
+				}
+			}
+		}
+
+		// We are finished.
+		if !need_more_tokens {
+			break
+		}
+		// Fetch the next token.
+		if !yaml_parser_fetch_next_token(parser) {
+			return false
+		}
+	}
+
+	parser.token_available = true
+	return true
+}
+
+// The dispatcher for token fetchers.
+//
+// Examines the character(s) at the current buffer position and delegates to
+// the matching fetch_* routine; exactly one token fetcher runs per call.
+func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
+	// Ensure that the buffer is initialized.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+
+	// Check if we just started scanning. Fetch STREAM-START then.
+	if !parser.stream_start_produced {
+		return yaml_parser_fetch_stream_start(parser)
+	}
+
+	// Eat whitespaces and comments until we reach the next token.
+	if !yaml_parser_scan_to_next_token(parser) {
+		return false
+	}
+
+	// Remove obsolete potential simple keys.
+	if !yaml_parser_stale_simple_keys(parser) {
+		return false
+	}
+
+	// Check the indentation level against the current column.
+	if !yaml_parser_unroll_indent(parser, parser.mark.column) {
+		return false
+	}
+
+	// Ensure that the buffer contains at least 4 characters. 4 is the length
+	// of the longest indicators ('--- ' and '... ').
+	if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
+		return false
+	}
+
+	// Is it the end of the stream?
+	if is_z(parser.buffer, parser.buffer_pos) {
+		return yaml_parser_fetch_stream_end(parser)
+	}
+
+	// Is it a directive?
+	if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' {
+		return yaml_parser_fetch_directive(parser)
+	}
+
+	// Snapshot buffer and position for the multi-byte indicator checks below;
+	// safe because nothing between here and the checks advances the buffer.
+	buf := parser.buffer
+	pos := parser.buffer_pos
+
+	// Is it the document start indicator?
+	if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) {
+		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN)
+	}
+
+	// Is it the document end indicator?
+	if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) {
+		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN)
+	}
+
+	// Is it the flow sequence start indicator?
+	if buf[pos] == '[' {
+		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN)
+	}
+
+	// Is it the flow mapping start indicator?
+	if parser.buffer[parser.buffer_pos] == '{' {
+		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN)
+	}
+
+	// Is it the flow sequence end indicator?
+	if parser.buffer[parser.buffer_pos] == ']' {
+		return yaml_parser_fetch_flow_collection_end(parser,
+			yaml_FLOW_SEQUENCE_END_TOKEN)
+	}
+
+	// Is it the flow mapping end indicator?
+	if parser.buffer[parser.buffer_pos] == '}' {
+		return yaml_parser_fetch_flow_collection_end(parser,
+			yaml_FLOW_MAPPING_END_TOKEN)
+	}
+
+	// Is it the flow entry indicator?
+	if parser.buffer[parser.buffer_pos] == ',' {
+		return yaml_parser_fetch_flow_entry(parser)
+	}
+
+	// Is it the block entry indicator?
+	if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) {
+		return yaml_parser_fetch_block_entry(parser)
+	}
+
+	// Is it the key indicator?
+	if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
+		return yaml_parser_fetch_key(parser)
+	}
+
+	// Is it the value indicator?
+	if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
+		return yaml_parser_fetch_value(parser)
+	}
+
+	// Is it an alias?
+	if parser.buffer[parser.buffer_pos] == '*' {
+		return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN)
+	}
+
+	// Is it an anchor?
+	if parser.buffer[parser.buffer_pos] == '&' {
+		return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN)
+	}
+
+	// Is it a tag?
+	if parser.buffer[parser.buffer_pos] == '!' {
+		return yaml_parser_fetch_tag(parser)
+	}
+
+	// Is it a literal scalar?
+	if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 {
+		return yaml_parser_fetch_block_scalar(parser, true)
+	}
+
+	// Is it a folded scalar?
+	if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 {
+		return yaml_parser_fetch_block_scalar(parser, false)
+	}
+
+	// Is it a single-quoted scalar?
+	if parser.buffer[parser.buffer_pos] == '\'' {
+		return yaml_parser_fetch_flow_scalar(parser, true)
+	}
+
+	// Is it a double-quoted scalar?
+	if parser.buffer[parser.buffer_pos] == '"' {
+		return yaml_parser_fetch_flow_scalar(parser, false)
+	}
+
+	// Is it a plain scalar?
+	//
+	// A plain scalar may start with any non-blank characters except
+	//
+	//      '-', '?', ':', ',', '[', ']', '{', '}',
+	//      '#', '&', '*', '!', '|', '>', '\'', '\"',
+	//      '%', '@', '`'.
+	//
+	// In the block context (and, for the '-' indicator, in the flow context
+	// too), it may also start with the characters
+	//
+	//      '-', '?', ':'
+	//
+	// if it is followed by a non-space character.
+	//
+	// The last rule is more restrictive than the specification requires.
+	// [Go] Make this logic more reasonable.
+	//switch parser.buffer[parser.buffer_pos] {
+	//case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`':
+	//}
+	// First clause: not blank/EOF and not an indicator character.
+	// Second/third clauses: re-admit '-', '?', ':' when followed by a
+	// non-space ('-' in any context; '?'/':' in the block context only).
+	if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' ||
+		parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' ||
+		parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' ||
+		parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
+		parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' ||
+		parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' ||
+		parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' ||
+		parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' ||
+		parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' ||
+		parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') ||
+		(parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) ||
+		(parser.flow_level == 0 &&
+			(parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') &&
+			!is_blankz(parser.buffer, parser.buffer_pos+1)) {
+		return yaml_parser_fetch_plain_scalar(parser)
+	}
+
+	// If we don't determine the token type so far, it is an error.
+	return yaml_parser_set_scanner_error(parser,
+		"while scanning for the next token", parser.mark,
+		"found character that cannot start any token")
+}
+
+// Check the list of potential simple keys and remove the positions that
+// cannot contain simple keys anymore.
+//
+// Returns false (with a scanner error set) only when a required simple key
+// has gone stale.
+func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool {
+	// Check for a potential simple key for each flow level.
+	for i := range parser.simple_keys {
+		simple_key := &parser.simple_keys[i]
+
+		// The specification requires that a simple key
+		//
+		//  - is limited to a single line,
+		//  - is shorter than 1024 characters.
+		if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) {
+
+			// Check if the potential simple key to be removed is required.
+			if simple_key.required {
+				return yaml_parser_set_scanner_error(parser,
+					"while scanning a simple key", simple_key.mark,
+					"could not find expected ':'")
+			}
+			simple_key.possible = false
+		}
+	}
+	return true
+}
+
+// Check if a simple key may start at the current position and add it if
+// needed.
+//
+// The saved key records the queue position (token_number) where the KEY
+// token must later be inserted if a ':' is found.
+func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
+	// A simple key is required at the current position if the scanner is in
+	// the block context and the current column coincides with the indentation
+	// level.
+
+	required := parser.flow_level == 0 && parser.indent == parser.mark.column
+
+	// A simple key is required only when it is the first token in the current
+	// line. Therefore it is always allowed. But we add a check anyway.
+	if required && !parser.simple_key_allowed {
+		panic("should not happen")
+	}
+
+	//
+	// If the current position may start a simple key, save it.
+	//
+	if parser.simple_key_allowed {
+		simple_key := yaml_simple_key_t{
+			possible:     true,
+			required:     required,
+			token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
+		}
+		simple_key.mark = parser.mark
+
+		// Drop any previous key at this flow level before installing the new one.
+		if !yaml_parser_remove_simple_key(parser) {
+			return false
+		}
+		parser.simple_keys[len(parser.simple_keys)-1] = simple_key
+	}
+	return true
+}
+
+// Remove a potential simple key at the current flow level.
+// Returns false (with a scanner error) only when the dropped key was required.
+func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool {
+	last := len(parser.simple_keys) - 1
+	key := &parser.simple_keys[last]
+	if key.possible && key.required {
+		// A required key that never found its ':' is a scanner error.
+		return yaml_parser_set_scanner_error(parser,
+			"while scanning a simple key", key.mark,
+			"could not find expected ':'")
+	}
+	// Remove the key from the stack.
+	key.possible = false
+	return true
+}
+
+// Increase the flow level and push a fresh, empty simple key slot for it.
+// Always succeeds.
+func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool {
+	parser.flow_level++
+	parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{})
+	return true
+}
+
+// Decrease the flow level and drop its simple key slot.
+// A no-op at the top level (flow_level == 0). Always succeeds.
+func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool {
+	if parser.flow_level == 0 {
+		return true
+	}
+	parser.flow_level--
+	last := len(parser.simple_keys) - 1
+	parser.simple_keys = parser.simple_keys[:last]
+	return true
+}
+
+// Push the current indentation level to the stack and set the new level if
+// the current column is greater than the indentation level. In this case,
+// append or insert the specified token into the token queue (number == -1
+// appends; otherwise the token is inserted at that queue position).
+func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool {
+	// In the flow context, do nothing.
+	if parser.flow_level > 0 {
+		return true
+	}
+
+	if parser.indent < column {
+		// Push the current indentation level to the stack and set the new
+		// indentation level.
+		parser.indents = append(parser.indents, parser.indent)
+		parser.indent = column
+
+		// Create a token and insert it into the queue.
+		token := yaml_token_t{
+			typ:        typ,
+			start_mark: mark,
+			end_mark:   mark,
+		}
+		// Convert the absolute token number into a queue-relative index.
+		if number > -1 {
+			number -= parser.tokens_parsed
+		}
+		yaml_insert_token(parser, number, &token)
+	}
+	return true
+}
+
+// Pop indentation levels from the indents stack until the current level
+// becomes less or equal to the column. For each indentation level, append
+// the BLOCK-END token.
+func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool {
+	// In the flow context, do nothing.
+	if parser.flow_level > 0 {
+		return true
+	}
+
+	// Loop through the indentation levels in the stack.
+	for parser.indent > column {
+		// Create a token and append it to the queue.
+		token := yaml_token_t{
+			typ:        yaml_BLOCK_END_TOKEN,
+			start_mark: parser.mark,
+			end_mark:   parser.mark,
+		}
+		yaml_insert_token(parser, -1, &token)
+
+		// Pop the indentation level.
+		parser.indent = parser.indents[len(parser.indents)-1]
+		parser.indents = parser.indents[:len(parser.indents)-1]
+	}
+	return true
+}
+
+// Initialize the scanner and produce the STREAM-START token.
+// Runs exactly once per stream, before any other token fetcher.
+func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool {
+
+	// Set the initial indentation.
+	parser.indent = -1
+
+	// Initialize the simple key stack.
+	parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{})
+
+	// A simple key is allowed at the beginning of the stream.
+	parser.simple_key_allowed = true
+
+	// We have started.
+	parser.stream_start_produced = true
+
+	// Create the STREAM-START token and append it to the queue.
+	token := yaml_token_t{
+		typ:        yaml_STREAM_START_TOKEN,
+		start_mark: parser.mark,
+		end_mark:   parser.mark,
+		encoding:   parser.encoding,
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the STREAM-END token and shut down the scanner.
+// Closes all open block scopes (emitting BLOCK-END tokens) first.
+func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool {
+
+	// Force new line.
+	if parser.mark.column != 0 {
+		parser.mark.column = 0
+		parser.mark.line++
+	}
+
+	// Reset the indentation level.
+	if !yaml_parser_unroll_indent(parser, -1) {
+		return false
+	}
+
+	// Reset simple keys.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	parser.simple_key_allowed = false
+
+	// Create the STREAM-END token and append it to the queue.
+	token := yaml_token_t{
+		typ:        yaml_STREAM_END_TOKEN,
+		start_mark: parser.mark,
+		end_mark:   parser.mark,
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token.
+// Directives terminate any open block scopes and forbid simple keys.
+func yaml_parser_fetch_directive(parser *yaml_parser_t) bool {
+	// Reset the indentation level.
+	if !yaml_parser_unroll_indent(parser, -1) {
+		return false
+	}
+
+	// Reset simple keys.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	parser.simple_key_allowed = false
+
+	// Create the YAML-DIRECTIVE or TAG-DIRECTIVE token.
+	token := yaml_token_t{}
+	if !yaml_parser_scan_directive(parser, &token) {
+		return false
+	}
+	// Append the token to the queue.
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the DOCUMENT-START or DOCUMENT-END token.
+// The caller has already verified that the next three characters are the
+// '---' or '...' indicator; this consumes them.
+func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+	// Reset the indentation level.
+	if !yaml_parser_unroll_indent(parser, -1) {
+		return false
+	}
+
+	// Reset simple keys.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	parser.simple_key_allowed = false
+
+	// Consume the token.
+	start_mark := parser.mark
+
+	// Skip the three indicator characters.
+	skip(parser)
+	skip(parser)
+	skip(parser)
+
+	end_mark := parser.mark
+
+	// Create the DOCUMENT-START or DOCUMENT-END token.
+	token := yaml_token_t{
+		typ:        typ,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	// Append the token to the queue.
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+	// The indicators '[' and '{' may start a simple key.
+	if !yaml_parser_save_simple_key(parser) {
+		return false
+	}
+
+	// Increase the flow level.
+	if !yaml_parser_increase_flow_level(parser) {
+		return false
+	}
+
+	// A simple key may follow the indicators '[' and '{'.
+	parser.simple_key_allowed = true
+
+	// Consume the token.
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	// Create the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+	token := yaml_token_t{
+		typ:        typ,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	// Append the token to the queue.
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+	// Reset any potential simple key on the current flow level.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	// Decrease the flow level.
+	if !yaml_parser_decrease_flow_level(parser) {
+		return false
+	}
+
+	// No simple keys after the indicators ']' and '}'.
+	parser.simple_key_allowed = false
+
+	// Consume the token.
+
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	// Create the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+	token := yaml_token_t{
+		typ:        typ,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	// Append the token to the queue.
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the FLOW-ENTRY token (the ',' separator inside flow collections).
+func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool {
+	// Reset any potential simple keys on the current flow level.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	// Simple keys are allowed after ','.
+	parser.simple_key_allowed = true
+
+	// Consume the token.
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	// Create the FLOW-ENTRY token and append it to the queue.
+	token := yaml_token_t{
+		typ:        yaml_FLOW_ENTRY_TOKEN,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the BLOCK-ENTRY token (the '-' item marker in block sequences).
+func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool {
+	// Check if the scanner is in the block context.
+	if parser.flow_level == 0 {
+		// Check if we are allowed to start a new entry.
+		if !parser.simple_key_allowed {
+			return yaml_parser_set_scanner_error(parser, "", parser.mark,
+				"block sequence entries are not allowed in this context")
+		}
+		// Add the BLOCK-SEQUENCE-START token if needed.
+		if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) {
+			return false
+		}
+	} else {
+		// It is an error for the '-' indicator to occur in the flow context,
+		// but we let the Parser detect and report about it because the Parser
+		// is able to point to the context.
+	}
+
+	// Reset any potential simple keys on the current flow level.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	// Simple keys are allowed after '-'.
+	parser.simple_key_allowed = true
+
+	// Consume the token.
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	// Create the BLOCK-ENTRY token and append it to the queue.
+	token := yaml_token_t{
+		typ:        yaml_BLOCK_ENTRY_TOKEN,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the KEY token (the explicit '?' key indicator).
+func yaml_parser_fetch_key(parser *yaml_parser_t) bool {
+
+	// In the block context, additional checks are required.
+	if parser.flow_level == 0 {
+		// Check if we are allowed to start a new key (not necessarily simple).
+		if !parser.simple_key_allowed {
+			return yaml_parser_set_scanner_error(parser, "", parser.mark,
+				"mapping keys are not allowed in this context")
+		}
+		// Add the BLOCK-MAPPING-START token if needed.
+		if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
+			return false
+		}
+	}
+
+	// Reset any potential simple keys on the current flow level.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	// Simple keys are allowed after '?' in the block context.
+	parser.simple_key_allowed = parser.flow_level == 0
+
+	// Consume the token.
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	// Create the KEY token and append it to the queue.
+	token := yaml_token_t{
+		typ:        yaml_KEY_TOKEN,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the VALUE token (the ':' indicator). If a simple key was pending,
+// a KEY token is first inserted retroactively at the position where the key
+// started.
+func yaml_parser_fetch_value(parser *yaml_parser_t) bool {
+
+	simple_key := &parser.simple_keys[len(parser.simple_keys)-1]
+
+	// Have we found a simple key?
+	if simple_key.possible {
+		// Create the KEY token and insert it into the queue.
+		token := yaml_token_t{
+			typ:        yaml_KEY_TOKEN,
+			start_mark: simple_key.mark,
+			end_mark:   simple_key.mark,
+		}
+		yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token)
+
+		// In the block context, we may need to add the BLOCK-MAPPING-START token.
+		if !yaml_parser_roll_indent(parser, simple_key.mark.column,
+			simple_key.token_number,
+			yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) {
+			return false
+		}
+
+		// Remove the simple key.
+		simple_key.possible = false
+
+		// A simple key cannot follow another simple key.
+		parser.simple_key_allowed = false
+
+	} else {
+		// The ':' indicator follows a complex key.
+
+		// In the block context, extra checks are required.
+		if parser.flow_level == 0 {
+
+			// Check if we are allowed to start a complex value.
+			if !parser.simple_key_allowed {
+				return yaml_parser_set_scanner_error(parser, "", parser.mark,
+					"mapping values are not allowed in this context")
+			}
+
+			// Add the BLOCK-MAPPING-START token if needed.
+			if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
+				return false
+			}
+		}
+
+		// Simple keys after ':' are allowed in the block context.
+		parser.simple_key_allowed = parser.flow_level == 0
+	}
+
+	// Consume the token.
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	// Create the VALUE token and append it to the queue.
+	token := yaml_token_t{
+		typ:        yaml_VALUE_TOKEN,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the ALIAS or ANCHOR token ('*' or '&' followed by a name).
+func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+	// An anchor or an alias could be a simple key.
+	if !yaml_parser_save_simple_key(parser) {
+		return false
+	}
+
+	// A simple key cannot follow an anchor or an alias.
+	parser.simple_key_allowed = false
+
+	// Create the ALIAS or ANCHOR token and append it to the queue.
+	var token yaml_token_t
+	if !yaml_parser_scan_anchor(parser, &token, typ) {
+		return false
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the TAG token ('!' handle plus suffix).
+func yaml_parser_fetch_tag(parser *yaml_parser_t) bool {
+	// A tag could be a simple key.
+	if !yaml_parser_save_simple_key(parser) {
+		return false
+	}
+
+	// A simple key cannot follow a tag.
+	parser.simple_key_allowed = false
+
+	// Create the TAG token and append it to the queue.
+	var token yaml_token_t
+	if !yaml_parser_scan_tag(parser, &token) {
+		return false
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens.
+// literal selects '|' (true) versus '>' (false) semantics.
+func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool {
+	// Remove any potential simple keys.
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	// A simple key may follow a block scalar.
+	parser.simple_key_allowed = true
+
+	// Create the SCALAR token and append it to the queue.
+	var token yaml_token_t
+	if !yaml_parser_scan_block_scalar(parser, &token, literal) {
+		return false
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens.
+// single selects '\'' (true) versus '"' (false) quoting.
+func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool {
+	// A flow scalar could be a simple key.
+	if !yaml_parser_save_simple_key(parser) {
+		return false
+	}
+
+	// A simple key cannot follow a flow scalar.
+	parser.simple_key_allowed = false
+
+	// Create the SCALAR token and append it to the queue.
+	var token yaml_token_t
+	if !yaml_parser_scan_flow_scalar(parser, &token, single) {
+		return false
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Produce the SCALAR(...,plain) token.
+func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool {
+	// A plain scalar could be a simple key.
+	if !yaml_parser_save_simple_key(parser) {
+		return false
+	}
+
+	// A simple key cannot follow a plain scalar.
+	parser.simple_key_allowed = false
+
+	// Create the SCALAR token and append it to the queue.
+	var token yaml_token_t
+	if !yaml_parser_scan_plain_scalar(parser, &token) {
+		return false
+	}
+	yaml_insert_token(parser, -1, &token)
+	return true
+}
+
+// Eat whitespaces and comments until the next token is found.
+// On return, buffer_pos rests on the first character of the next token.
+func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {
+
+	// Until the next token is not found.
+	for {
+		// Allow the BOM mark to start a line.
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+		if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) {
+			skip(parser)
+		}
+
+		// Eat whitespaces.
+		// Tabs are allowed:
+		//  - in the flow context
+		//  - in the block context, but not at the beginning of the line or
+		//  after '-', '?', or ':' (complex value).
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+
+		for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') {
+			skip(parser)
+			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+				return false
+			}
+		}
+
+		// Eat a comment until a line break.
+		if parser.buffer[parser.buffer_pos] == '#' {
+			for !is_breakz(parser.buffer, parser.buffer_pos) {
+				skip(parser)
+				if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+					return false
+				}
+			}
+		}
+
+		// If it is a line break, eat it.
+		if is_break(parser.buffer, parser.buffer_pos) {
+			// Line breaks may be two bytes (CRLF), so ensure two are buffered.
+			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+				return false
+			}
+			skip_line(parser)
+
+			// In the block context, a new line may start a simple key.
+			if parser.flow_level == 0 {
+				parser.simple_key_allowed = true
+			}
+		} else {
+			break // We have found a token.
+		}
+	}
+
+	return true
+}
+
+// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token.
+//
+// Scope:
+//      %YAML    1.1    # a comment \n
+//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+//      %TAG    !yaml!  tag:yaml.org,2002:  \n
+//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+//
+// Fills *token with a VERSION-DIRECTIVE or TAG-DIRECTIVE token and consumes
+// the rest of the directive line (including trailing comments and the line
+// break). Returns false with a scanner error on any malformed directive.
+//
+// Fix: corrected the user-facing error message typo "uknown" -> "unknown".
+func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool {
+	// Eat '%'.
+	start_mark := parser.mark
+	skip(parser)
+
+	// Scan the directive name.
+	var name []byte
+	if !yaml_parser_scan_directive_name(parser, start_mark, &name) {
+		return false
+	}
+
+	// Is it a YAML directive?
+	if bytes.Equal(name, []byte("YAML")) {
+		// Scan the VERSION directive value.
+		var major, minor int8
+		if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) {
+			return false
+		}
+		end_mark := parser.mark
+
+		// Create a VERSION-DIRECTIVE token.
+		*token = yaml_token_t{
+			typ:        yaml_VERSION_DIRECTIVE_TOKEN,
+			start_mark: start_mark,
+			end_mark:   end_mark,
+			major:      major,
+			minor:      minor,
+		}
+
+		// Is it a TAG directive?
+	} else if bytes.Equal(name, []byte("TAG")) {
+		// Scan the TAG directive value.
+		var handle, prefix []byte
+		if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) {
+			return false
+		}
+		end_mark := parser.mark
+
+		// Create a TAG-DIRECTIVE token.
+		*token = yaml_token_t{
+			typ:        yaml_TAG_DIRECTIVE_TOKEN,
+			start_mark: start_mark,
+			end_mark:   end_mark,
+			value:      handle,
+			prefix:     prefix,
+		}
+
+		// Unknown directive.
+	} else {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "found unknown directive name")
+		return false
+	}
+
+	// Eat the rest of the line including any comments.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+
+	for is_blank(parser.buffer, parser.buffer_pos) {
+		skip(parser)
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+	}
+
+	if parser.buffer[parser.buffer_pos] == '#' {
+		for !is_breakz(parser.buffer, parser.buffer_pos) {
+			skip(parser)
+			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+				return false
+			}
+		}
+	}
+
+	// Check if we are at the end of the line.
+	if !is_breakz(parser.buffer, parser.buffer_pos) {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "did not find expected comment or line break")
+		return false
+	}
+
+	// Eat a line break.
+	if is_break(parser.buffer, parser.buffer_pos) {
+		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+			return false
+		}
+		skip_line(parser)
+	}
+
+	return true
+}
+
+// Scan the directive name.
+//
+// Scope:
+//      %YAML   1.1     # a comment \n
+//       ^^^^
+//      %TAG    !yaml!  tag:yaml.org,2002: \n
+//       ^^^
+//
+// On success *name holds the alphanumeric name; the following character is
+// guaranteed to be a blank or end-of-input.
+func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool {
+	// Consume the directive name.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+
+	var s []byte
+	for is_alpha(parser.buffer, parser.buffer_pos) {
+		s = read(parser, s)
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+	}
+
+	// Check if the name is empty.
+	if len(s) == 0 {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "could not find expected directive name")
+		return false
+	}
+
+	// Check for a blank character after the name.
+	if !is_blankz(parser.buffer, parser.buffer_pos) {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "found unexpected non-alphabetical character")
+		return false
+	}
+	*name = s
+	return true
+}
+
+// Scan the value of VERSION-DIRECTIVE.
+//
+// Scope:
+//      %YAML   1.1     # a comment \n
+//           ^^^^^^
+// Parses "<major>.<minor>" into the two out parameters.
+func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool {
+	// Eat whitespaces.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+	for is_blank(parser.buffer, parser.buffer_pos) {
+		skip(parser)
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+	}
+
+	// Consume the major version number.
+	if !yaml_parser_scan_version_directive_number(parser, start_mark, major) {
+		return false
+	}
+
+	// Eat '.'.
+	if parser.buffer[parser.buffer_pos] != '.' {
+		return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
+			start_mark, "did not find expected digit or '.' character")
+	}
+
+	skip(parser)
+
+	// Consume the minor version number.
+	if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) {
+		return false
+	}
+	return true
+}
+
+// max_number_length bounds each version component to two digits.
+const max_number_length = 2
+
+// Scan the version number of VERSION-DIRECTIVE.
+//
+// Scope:
+//      %YAML   1.1     # a comment \n
+//              ^
+//      %YAML   1.1     # a comment \n
+//                ^
+// Accumulates at most max_number_length decimal digits into *number.
+func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool {
+
+	// Repeat while the next character is digit.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+	var value, length int8
+	for is_digit(parser.buffer, parser.buffer_pos) {
+		// Check if the number is too long.
+		length++
+		if length > max_number_length {
+			return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
+				start_mark, "found extremely long version number")
+		}
+		value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos))
+		skip(parser)
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+	}
+
+	// Check if the number was present.
+	if length == 0 {
+		return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
+			start_mark, "did not find expected version number")
+	}
+	*number = value
+	return true
+}
+
+// Scan the value of a TAG-DIRECTIVE token.
+//
+// Scope:
+//      %TAG    !yaml!  tag:yaml.org,2002:  \n
+//          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+// Parses the handle ("!yaml!") and the prefix URI into the out parameters;
+// requires whitespace between them and a blank or line break after.
+func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool {
+	var handle_value, prefix_value []byte
+
+	// Eat whitespaces.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+
+	for is_blank(parser.buffer, parser.buffer_pos) {
+		skip(parser)
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+	}
+
+	// Scan a handle.
+	if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) {
+		return false
+	}
+
+	// Expect a whitespace.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+	if !is_blank(parser.buffer, parser.buffer_pos) {
+		yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
+			start_mark, "did not find expected whitespace")
+		return false
+	}
+
+	// Eat whitespaces.
+	for is_blank(parser.buffer, parser.buffer_pos) {
+		skip(parser)
+		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+			return false
+		}
+	}
+
+	// Scan a prefix.
+	if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) {
+		return false
+	}
+
+	// Expect a whitespace or line break.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+	if !is_blankz(parser.buffer, parser.buffer_pos) {
+		yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
+			start_mark, "did not find expected whitespace or line break")
+		return false
+	}
+
+	// Publish results only after full validation.
+	*handle = handle_value
+	*prefix = prefix_value
+	return true
+}
+
+func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool {
+ var s []byte
+
+ // Eat the indicator character.
+ start_mark := parser.mark
+ skip(parser)
+
+ // Consume the value.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ for is_alpha(parser.buffer, parser.buffer_pos) {
+ s = read(parser, s)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ end_mark := parser.mark
+
+ /*
+ * Check if length of the anchor is greater than 0 and it is followed by
+ * a whitespace character or one of the indicators:
+ *
+ * '?', ':', ',', ']', '}', '%', '@', '`'.
+ */
+
+ if len(s) == 0 ||
+ !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' ||
+ parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' ||
+ parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' ||
+ parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' ||
+ parser.buffer[parser.buffer_pos] == '`') {
+ context := "while scanning an alias"
+ if typ == yaml_ANCHOR_TOKEN {
+ context = "while scanning an anchor"
+ }
+ yaml_parser_set_scanner_error(parser, context, start_mark,
+ "did not find expected alphabetic or numeric character")
+ return false
+ }
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: typ,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: s,
+ }
+
+ return true
+}
+
+/*
+ * Scan a TAG token.
+ */
+
+func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool {
+ var handle, suffix []byte
+
+ start_mark := parser.mark
+
+ // Check if the tag is in the canonical form.
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+
+ if parser.buffer[parser.buffer_pos+1] == '<' {
+ // Keep the handle as ''
+
+ // Eat '!<'
+ skip(parser)
+ skip(parser)
+
+ // Consume the tag value.
+ if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
+ return false
+ }
+
+ // Check for '>' and eat it.
+ if parser.buffer[parser.buffer_pos] != '>' {
+ yaml_parser_set_scanner_error(parser, "while scanning a tag",
+ start_mark, "did not find the expected '>'")
+ return false
+ }
+
+ skip(parser)
+ } else {
+ // The tag has either the '!suffix' or the '!handle!suffix' form.
+
+ // First, try to scan a handle.
+ if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) {
+ return false
+ }
+
+ // Check if it is, indeed, handle.
+ if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' {
+ // Scan the suffix now.
+ if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
+ return false
+ }
+ } else {
+ // It wasn't a handle after all. Scan the rest of the tag.
+ if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) {
+ return false
+ }
+
+ // Set the handle to '!'.
+ handle = []byte{'!'}
+
+ // A special case: the '!' tag. Set the handle to '' and the
+ // suffix to '!'.
+ if len(suffix) == 0 {
+ handle, suffix = suffix, handle
+ }
+ }
+ }
+
+ // Check the character which ends the tag.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if !is_blankz(parser.buffer, parser.buffer_pos) {
+ yaml_parser_set_scanner_error(parser, "while scanning a tag",
+ start_mark, "did not find expected whitespace or line break")
+ return false
+ }
+
+ end_mark := parser.mark
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: yaml_TAG_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: handle,
+ suffix: suffix,
+ }
+ return true
+}
+
+// Scan a tag handle.
+func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool {
+ // Check the initial '!' character.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if parser.buffer[parser.buffer_pos] != '!' {
+ yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "did not find expected '!'")
+ return false
+ }
+
+ var s []byte
+
+ // Copy the '!' character.
+ s = read(parser, s)
+
+ // Copy all subsequent alphabetical and numerical characters.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ for is_alpha(parser.buffer, parser.buffer_pos) {
+ s = read(parser, s)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Check if the trailing character is '!' and copy it.
+ if parser.buffer[parser.buffer_pos] == '!' {
+ s = read(parser, s)
+ } else {
+ // It's either the '!' tag or not really a tag handle. If it's a %TAG
+ // directive, it's an error. If it's a tag token, it must be a part of URI.
+ if directive && !(s[0] == '!' && s[1] == 0) {
+ yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "did not find expected '!'")
+ return false
+ }
+ }
+
+ *handle = s
+ return true
+}
+
+// Scan a tag.
+func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool {
+ //size_t length = head ? strlen((char *)head) : 0
+ var s []byte
+
+ // Copy the head if needed.
+ //
+ // Note that we don't copy the leading '!' character.
+ if len(head) > 1 {
+ s = append(s, head[1:]...)
+ }
+
+ // Scan the tag.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ // The set of characters that may appear in URI is as follows:
+ //
+ // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&',
+ // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']',
+ // '%'.
+ // [Go] Convert this into more reasonable logic.
+ for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' ||
+ parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' ||
+ parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' ||
+ parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' ||
+ parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' ||
+ parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' ||
+ parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' ||
+ parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' ||
+ parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' ||
+ parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' ||
+ parser.buffer[parser.buffer_pos] == '%' {
+ // Check if it is a URI-escape sequence.
+ if parser.buffer[parser.buffer_pos] == '%' {
+ if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) {
+ return false
+ }
+ } else {
+ s = read(parser, s)
+ }
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Check if the tag is non-empty.
+ if len(s) == 0 {
+ yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "did not find expected tag URI")
+ return false
+ }
+ *uri = s
+ return true
+}
+
+// Decode an URI-escape sequence corresponding to a single UTF-8 character.
+func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool {
+
+ // Decode the required number of characters.
+ w := 1024
+ for w > 0 {
+ // Check for a URI-escaped octet.
+ if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
+ return false
+ }
+
+ if !(parser.buffer[parser.buffer_pos] == '%' &&
+ is_hex(parser.buffer, parser.buffer_pos+1) &&
+ is_hex(parser.buffer, parser.buffer_pos+2)) {
+ return yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "did not find URI escaped octet")
+ }
+
+ // Get the octet.
+ octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2))
+
+ // If it is the leading octet, determine the length of the UTF-8 sequence.
+ if w == 1024 {
+ w = width(octet)
+ if w == 0 {
+ return yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "found an incorrect leading UTF-8 octet")
+ }
+ } else {
+ // Check if the trailing octet is correct.
+ if octet&0xC0 != 0x80 {
+ return yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "found an incorrect trailing UTF-8 octet")
+ }
+ }
+
+ // Copy the octet and move the pointers.
+ *s = append(*s, octet)
+ skip(parser)
+ skip(parser)
+ skip(parser)
+ w--
+ }
+ return true
+}
+
+// Scan a block scalar.
+func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool {
+ // Eat the indicator '|' or '>'.
+ start_mark := parser.mark
+ skip(parser)
+
+ // Scan the additional block scalar indicators.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ // Check for a chomping indicator.
+ var chomping, increment int
+ if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
+ // Set the chomping method and eat the indicator.
+ if parser.buffer[parser.buffer_pos] == '+' {
+ chomping = +1
+ } else {
+ chomping = -1
+ }
+ skip(parser)
+
+ // Check for an indentation indicator.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if is_digit(parser.buffer, parser.buffer_pos) {
+ // Check that the intendation is greater than 0.
+ if parser.buffer[parser.buffer_pos] == '0' {
+ yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
+ start_mark, "found an intendation indicator equal to 0")
+ return false
+ }
+
+ // Get the intendation level and eat the indicator.
+ increment = as_digit(parser.buffer, parser.buffer_pos)
+ skip(parser)
+ }
+
+ } else if is_digit(parser.buffer, parser.buffer_pos) {
+ // Do the same as above, but in the opposite order.
+
+ if parser.buffer[parser.buffer_pos] == '0' {
+ yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
+ start_mark, "found an intendation indicator equal to 0")
+ return false
+ }
+ increment = as_digit(parser.buffer, parser.buffer_pos)
+ skip(parser)
+
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
+ if parser.buffer[parser.buffer_pos] == '+' {
+ chomping = +1
+ } else {
+ chomping = -1
+ }
+ skip(parser)
+ }
+ }
+
+ // Eat whitespaces and comments to the end of the line.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ for is_blank(parser.buffer, parser.buffer_pos) {
+ skip(parser)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+ if parser.buffer[parser.buffer_pos] == '#' {
+ for !is_breakz(parser.buffer, parser.buffer_pos) {
+ skip(parser)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+ }
+
+ // Check if we are at the end of the line.
+ if !is_breakz(parser.buffer, parser.buffer_pos) {
+ yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
+ start_mark, "did not find expected comment or line break")
+ return false
+ }
+
+ // Eat a line break.
+ if is_break(parser.buffer, parser.buffer_pos) {
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+ skip_line(parser)
+ }
+
+ end_mark := parser.mark
+
+ // Set the intendation level if it was specified.
+ var indent int
+ if increment > 0 {
+ if parser.indent >= 0 {
+ indent = parser.indent + increment
+ } else {
+ indent = increment
+ }
+ }
+
+ // Scan the leading line breaks and determine the indentation level if needed.
+ var s, leading_break, trailing_breaks []byte
+ if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
+ return false
+ }
+
+ // Scan the block scalar content.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ var leading_blank, trailing_blank bool
+ for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) {
+ // We are at the beginning of a non-empty line.
+
+ // Is it a trailing whitespace?
+ trailing_blank = is_blank(parser.buffer, parser.buffer_pos)
+
+ // Check if we need to fold the leading line break.
+ if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' {
+ // Do we need to join the lines by space?
+ if len(trailing_breaks) == 0 {
+ s = append(s, ' ')
+ }
+ } else {
+ s = append(s, leading_break...)
+ }
+ leading_break = leading_break[:0]
+
+ // Append the remaining line breaks.
+ s = append(s, trailing_breaks...)
+ trailing_breaks = trailing_breaks[:0]
+
+ // Is it a leading whitespace?
+ leading_blank = is_blank(parser.buffer, parser.buffer_pos)
+
+ // Consume the current line.
+ for !is_breakz(parser.buffer, parser.buffer_pos) {
+ s = read(parser, s)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Consume the line break.
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+
+ leading_break = read_line(parser, leading_break)
+
+ // Eat the following intendation spaces and line breaks.
+ if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
+ return false
+ }
+ }
+
+ // Chomp the tail.
+ if chomping != -1 {
+ s = append(s, leading_break...)
+ }
+ if chomping == 1 {
+ s = append(s, trailing_breaks...)
+ }
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: yaml_SCALAR_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: s,
+ style: yaml_LITERAL_SCALAR_STYLE,
+ }
+ if !literal {
+ token.style = yaml_FOLDED_SCALAR_STYLE
+ }
+ return true
+}
+
+// Scan intendation spaces and line breaks for a block scalar. Determine the
+// intendation level if needed.
+func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool {
+ *end_mark = parser.mark
+
+ // Eat the intendation spaces and line breaks.
+ max_indent := 0
+ for {
+ // Eat the intendation spaces.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) {
+ skip(parser)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+ if parser.mark.column > max_indent {
+ max_indent = parser.mark.column
+ }
+
+ // Check for a tab character messing the intendation.
+ if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) {
+ return yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
+ start_mark, "found a tab character where an intendation space is expected")
+ }
+
+ // Have we found a non-empty line?
+ if !is_break(parser.buffer, parser.buffer_pos) {
+ break
+ }
+
+ // Consume the line break.
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+ // [Go] Should really be returning breaks instead.
+ *breaks = read_line(parser, *breaks)
+ *end_mark = parser.mark
+ }
+
+ // Determine the indentation level if needed.
+ if *indent == 0 {
+ *indent = max_indent
+ if *indent < parser.indent+1 {
+ *indent = parser.indent + 1
+ }
+ if *indent < 1 {
+ *indent = 1
+ }
+ }
+ return true
+}
+
+// Scan a quoted scalar.
+func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool {
+ // Eat the left quote.
+ start_mark := parser.mark
+ skip(parser)
+
+ // Consume the content of the quoted scalar.
+ var s, leading_break, trailing_breaks, whitespaces []byte
+ for {
+ // Check that there are no document indicators at the beginning of the line.
+ if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
+ return false
+ }
+
+ if parser.mark.column == 0 &&
+ ((parser.buffer[parser.buffer_pos+0] == '-' &&
+ parser.buffer[parser.buffer_pos+1] == '-' &&
+ parser.buffer[parser.buffer_pos+2] == '-') ||
+ (parser.buffer[parser.buffer_pos+0] == '.' &&
+ parser.buffer[parser.buffer_pos+1] == '.' &&
+ parser.buffer[parser.buffer_pos+2] == '.')) &&
+ is_blankz(parser.buffer, parser.buffer_pos+3) {
+ yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
+ start_mark, "found unexpected document indicator")
+ return false
+ }
+
+ // Check for EOF.
+ if is_z(parser.buffer, parser.buffer_pos) {
+ yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
+ start_mark, "found unexpected end of stream")
+ return false
+ }
+
+ // Consume non-blank characters.
+ leading_blanks := false
+ for !is_blankz(parser.buffer, parser.buffer_pos) {
+ if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' {
+ // Is is an escaped single quote.
+ s = append(s, '\'')
+ skip(parser)
+ skip(parser)
+
+ } else if single && parser.buffer[parser.buffer_pos] == '\'' {
+ // It is a right single quote.
+ break
+ } else if !single && parser.buffer[parser.buffer_pos] == '"' {
+ // It is a right double quote.
+ break
+
+ } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) {
+ // It is an escaped line break.
+ if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
+ return false
+ }
+ skip(parser)
+ skip_line(parser)
+ leading_blanks = true
+ break
+
+ } else if !single && parser.buffer[parser.buffer_pos] == '\\' {
+ // It is an escape sequence.
+ code_length := 0
+
+ // Check the escape character.
+ switch parser.buffer[parser.buffer_pos+1] {
+ case '0':
+ s = append(s, 0)
+ case 'a':
+ s = append(s, '\x07')
+ case 'b':
+ s = append(s, '\x08')
+ case 't', '\t':
+ s = append(s, '\x09')
+ case 'n':
+ s = append(s, '\x0A')
+ case 'v':
+ s = append(s, '\x0B')
+ case 'f':
+ s = append(s, '\x0C')
+ case 'r':
+ s = append(s, '\x0D')
+ case 'e':
+ s = append(s, '\x1B')
+ case ' ':
+ s = append(s, '\x20')
+ case '"':
+ s = append(s, '"')
+ case '\'':
+ s = append(s, '\'')
+ case '\\':
+ s = append(s, '\\')
+ case 'N': // NEL (#x85)
+ s = append(s, '\xC2')
+ s = append(s, '\x85')
+ case '_': // #xA0
+ s = append(s, '\xC2')
+ s = append(s, '\xA0')
+ case 'L': // LS (#x2028)
+ s = append(s, '\xE2')
+ s = append(s, '\x80')
+ s = append(s, '\xA8')
+ case 'P': // PS (#x2029)
+ s = append(s, '\xE2')
+ s = append(s, '\x80')
+ s = append(s, '\xA9')
+ case 'x':
+ code_length = 2
+ case 'u':
+ code_length = 4
+ case 'U':
+ code_length = 8
+ default:
+ yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
+ start_mark, "found unknown escape character")
+ return false
+ }
+
+ skip(parser)
+ skip(parser)
+
+ // Consume an arbitrary escape code.
+ if code_length > 0 {
+ var value int
+
+ // Scan the character value.
+ if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) {
+ return false
+ }
+ for k := 0; k < code_length; k++ {
+ if !is_hex(parser.buffer, parser.buffer_pos+k) {
+ yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
+ start_mark, "did not find expected hexdecimal number")
+ return false
+ }
+ value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k)
+ }
+
+ // Check the value and write the character.
+ if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF {
+ yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
+ start_mark, "found invalid Unicode character escape code")
+ return false
+ }
+ if value <= 0x7F {
+ s = append(s, byte(value))
+ } else if value <= 0x7FF {
+ s = append(s, byte(0xC0+(value>>6)))
+ s = append(s, byte(0x80+(value&0x3F)))
+ } else if value <= 0xFFFF {
+ s = append(s, byte(0xE0+(value>>12)))
+ s = append(s, byte(0x80+((value>>6)&0x3F)))
+ s = append(s, byte(0x80+(value&0x3F)))
+ } else {
+ s = append(s, byte(0xF0+(value>>18)))
+ s = append(s, byte(0x80+((value>>12)&0x3F)))
+ s = append(s, byte(0x80+((value>>6)&0x3F)))
+ s = append(s, byte(0x80+(value&0x3F)))
+ }
+
+ // Advance the pointer.
+ for k := 0; k < code_length; k++ {
+ skip(parser)
+ }
+ }
+ } else {
+ // It is a non-escaped non-blank character.
+ s = read(parser, s)
+ }
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+ }
+
+ // Check if we are at the end of the scalar.
+ if single {
+ if parser.buffer[parser.buffer_pos] == '\'' {
+ break
+ }
+ } else {
+ if parser.buffer[parser.buffer_pos] == '"' {
+ break
+ }
+ }
+
+ // Consume blank characters.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
+ if is_blank(parser.buffer, parser.buffer_pos) {
+ // Consume a space or a tab character.
+ if !leading_blanks {
+ whitespaces = read(parser, whitespaces)
+ } else {
+ skip(parser)
+ }
+ } else {
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+
+ // Check if it is a first line break.
+ if !leading_blanks {
+ whitespaces = whitespaces[:0]
+ leading_break = read_line(parser, leading_break)
+ leading_blanks = true
+ } else {
+ trailing_breaks = read_line(parser, trailing_breaks)
+ }
+ }
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Join the whitespaces or fold line breaks.
+ if leading_blanks {
+ // Do we need to fold line breaks?
+ if len(leading_break) > 0 && leading_break[0] == '\n' {
+ if len(trailing_breaks) == 0 {
+ s = append(s, ' ')
+ } else {
+ s = append(s, trailing_breaks...)
+ }
+ } else {
+ s = append(s, leading_break...)
+ s = append(s, trailing_breaks...)
+ }
+ trailing_breaks = trailing_breaks[:0]
+ leading_break = leading_break[:0]
+ } else {
+ s = append(s, whitespaces...)
+ whitespaces = whitespaces[:0]
+ }
+ }
+
+ // Eat the right quote.
+ skip(parser)
+ end_mark := parser.mark
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: yaml_SCALAR_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: s,
+ style: yaml_SINGLE_QUOTED_SCALAR_STYLE,
+ }
+ if !single {
+ token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+ }
+ return true
+}
+
+// Scan a plain scalar.
+func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool {
+
+ var s, leading_break, trailing_breaks, whitespaces []byte
+ var leading_blanks bool
+ var indent = parser.indent + 1
+
+ start_mark := parser.mark
+ end_mark := parser.mark
+
+ // Consume the content of the plain scalar.
+ for {
+ // Check for a document indicator.
+ if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
+ return false
+ }
+ if parser.mark.column == 0 &&
+ ((parser.buffer[parser.buffer_pos+0] == '-' &&
+ parser.buffer[parser.buffer_pos+1] == '-' &&
+ parser.buffer[parser.buffer_pos+2] == '-') ||
+ (parser.buffer[parser.buffer_pos+0] == '.' &&
+ parser.buffer[parser.buffer_pos+1] == '.' &&
+ parser.buffer[parser.buffer_pos+2] == '.')) &&
+ is_blankz(parser.buffer, parser.buffer_pos+3) {
+ break
+ }
+
+ // Check for a comment.
+ if parser.buffer[parser.buffer_pos] == '#' {
+ break
+ }
+
+ // Consume non-blank characters.
+ for !is_blankz(parser.buffer, parser.buffer_pos) {
+
+ // Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13".
+ if parser.flow_level > 0 &&
+ parser.buffer[parser.buffer_pos] == ':' &&
+ !is_blankz(parser.buffer, parser.buffer_pos+1) {
+ yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
+ start_mark, "found unexpected ':'")
+ return false
+ }
+
+ // Check for indicators that may end a plain scalar.
+ if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) ||
+ (parser.flow_level > 0 &&
+ (parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' ||
+ parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' ||
+ parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
+ parser.buffer[parser.buffer_pos] == '}')) {
+ break
+ }
+
+ // Check if we need to join whitespaces and breaks.
+ if leading_blanks || len(whitespaces) > 0 {
+ if leading_blanks {
+ // Do we need to fold line breaks?
+ if leading_break[0] == '\n' {
+ if len(trailing_breaks) == 0 {
+ s = append(s, ' ')
+ } else {
+ s = append(s, trailing_breaks...)
+ }
+ } else {
+ s = append(s, leading_break...)
+ s = append(s, trailing_breaks...)
+ }
+ trailing_breaks = trailing_breaks[:0]
+ leading_break = leading_break[:0]
+ leading_blanks = false
+ } else {
+ s = append(s, whitespaces...)
+ whitespaces = whitespaces[:0]
+ }
+ }
+
+ // Copy the character.
+ s = read(parser, s)
+
+ end_mark = parser.mark
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+ }
+
+ // Is it the end?
+ if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) {
+ break
+ }
+
+ // Consume blank characters.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
+ if is_blank(parser.buffer, parser.buffer_pos) {
+
+ // Check for tab character that abuse intendation.
+ if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) {
+ yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
+ start_mark, "found a tab character that violate intendation")
+ return false
+ }
+
+ // Consume a space or a tab character.
+ if !leading_blanks {
+ whitespaces = read(parser, whitespaces)
+ } else {
+ skip(parser)
+ }
+ } else {
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+
+ // Check if it is a first line break.
+ if !leading_blanks {
+ whitespaces = whitespaces[:0]
+ leading_break = read_line(parser, leading_break)
+ leading_blanks = true
+ } else {
+ trailing_breaks = read_line(parser, trailing_breaks)
+ }
+ }
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Check intendation level.
+ if parser.flow_level == 0 && parser.mark.column < indent {
+ break
+ }
+ }
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: yaml_SCALAR_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: s,
+ style: yaml_PLAIN_SCALAR_STYLE,
+ }
+
+ // Note that we change the 'simple_key_allowed' flag.
+ if leading_blanks {
+ parser.simple_key_allowed = true
+ }
+ return true
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/sorter.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/sorter.go
new file mode 100644
index 00000000..5958822f
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/sorter.go
@@ -0,0 +1,104 @@
+package yaml
+
+import (
+ "reflect"
+ "unicode"
+)
+
+type keyList []reflect.Value
+
+func (l keyList) Len() int { return len(l) }
+func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+func (l keyList) Less(i, j int) bool {
+ a := l[i]
+ b := l[j]
+ ak := a.Kind()
+ bk := b.Kind()
+ for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
+ a = a.Elem()
+ ak = a.Kind()
+ }
+ for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
+ b = b.Elem()
+ bk = b.Kind()
+ }
+ af, aok := keyFloat(a)
+ bf, bok := keyFloat(b)
+ if aok && bok {
+ if af != bf {
+ return af < bf
+ }
+ if ak != bk {
+ return ak < bk
+ }
+ return numLess(a, b)
+ }
+ if ak != reflect.String || bk != reflect.String {
+ return ak < bk
+ }
+ ar, br := []rune(a.String()), []rune(b.String())
+ for i := 0; i < len(ar) && i < len(br); i++ {
+ if ar[i] == br[i] {
+ continue
+ }
+ al := unicode.IsLetter(ar[i])
+ bl := unicode.IsLetter(br[i])
+ if al && bl {
+ return ar[i] < br[i]
+ }
+ if al || bl {
+ return bl
+ }
+ var ai, bi int
+ var an, bn int64
+ for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
+ an = an*10 + int64(ar[ai]-'0')
+ }
+ for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
+ bn = bn*10 + int64(br[bi]-'0')
+ }
+ if an != bn {
+ return an < bn
+ }
+ if ai != bi {
+ return ai < bi
+ }
+ return ar[i] < br[i]
+ }
+ return len(ar) < len(br)
+}
+
+// keyFloat returns a float value for v if it is a number/bool
+// and whether it is a number/bool or not.
+func keyFloat(v reflect.Value) (f float64, ok bool) {
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return float64(v.Int()), true
+ case reflect.Float32, reflect.Float64:
+ return v.Float(), true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return float64(v.Uint()), true
+ case reflect.Bool:
+ if v.Bool() {
+ return 1, true
+ }
+ return 0, true
+ }
+ return 0, false
+}
+
+// numLess returns whether a < b.
+// a and b must necessarily have the same kind.
+func numLess(a, b reflect.Value) bool {
+ switch a.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return a.Int() < b.Int()
+ case reflect.Float32, reflect.Float64:
+ return a.Float() < b.Float()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return a.Uint() < b.Uint()
+ case reflect.Bool:
+ return !a.Bool() && b.Bool()
+ }
+ panic("not a number")
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/writerc.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/writerc.go
new file mode 100644
index 00000000..190362f2
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/writerc.go
@@ -0,0 +1,89 @@
+package yaml
+
+// Set the writer error and return false.
+func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
+ emitter.error = yaml_WRITER_ERROR
+ emitter.problem = problem
+ return false
+}
+
+// Flush the output buffer.
+func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
+ if emitter.write_handler == nil {
+ panic("write handler not set")
+ }
+
+ // Check if the buffer is empty.
+ if emitter.buffer_pos == 0 {
+ return true
+ }
+
+ // If the output encoding is UTF-8, we don't need to recode the buffer.
+ if emitter.encoding == yaml_UTF8_ENCODING {
+ if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
+ return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
+ }
+ emitter.buffer_pos = 0
+ return true
+ }
+
+ // Recode the buffer into the raw buffer.
+ var low, high int
+ if emitter.encoding == yaml_UTF16LE_ENCODING {
+ low, high = 0, 1
+ } else {
+ high, low = 1, 0
+ }
+
+ pos := 0
+ for pos < emitter.buffer_pos {
+ // See the "reader.c" code for more details on UTF-8 encoding. Note
+ // that we assume that the buffer contains a valid UTF-8 sequence.
+
+ // Read the next UTF-8 character.
+ octet := emitter.buffer[pos]
+
+ var w int
+ var value rune
+ switch {
+ case octet&0x80 == 0x00:
+ w, value = 1, rune(octet&0x7F)
+ case octet&0xE0 == 0xC0:
+ w, value = 2, rune(octet&0x1F)
+ case octet&0xF0 == 0xE0:
+ w, value = 3, rune(octet&0x0F)
+ case octet&0xF8 == 0xF0:
+ w, value = 4, rune(octet&0x07)
+ }
+ for k := 1; k < w; k++ {
+ octet = emitter.buffer[pos+k]
+ value = (value << 6) + (rune(octet) & 0x3F)
+ }
+ pos += w
+
+ // Write the character.
+ if value < 0x10000 {
+ var b [2]byte
+ b[high] = byte(value >> 8)
+ b[low] = byte(value & 0xFF)
+ emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
+ } else {
+ // Write the character using a surrogate pair (check "reader.c").
+ var b [4]byte
+ value -= 0x10000
+ b[high] = byte(0xD8 + (value >> 18))
+ b[low] = byte((value >> 10) & 0xFF)
+ b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
+ b[low+2] = byte(value & 0xFF)
+ emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
+ }
+ }
+
+ // Write the raw buffer.
+ if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
+ return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
+ }
+ emitter.buffer_pos = 0
+ emitter.raw_buffer = emitter.raw_buffer[:0]
+ return true
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yaml.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yaml.go
new file mode 100644
index 00000000..f1c390e7
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yaml.go
@@ -0,0 +1,301 @@
+// Package yaml implements YAML support for the Go language.
+//
+// Source code and other details for the project are available at GitHub:
+//
+// https://github.com/go-yaml/yaml
+//
+package yaml
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+ "sync"
+)
+
+type yamlError string
+
+func fail(msg string) {
+ panic(yamlError(msg))
+}
+
+func handleErr(err *error) {
+ if r := recover(); r != nil {
+ if e, ok := r.(yamlError); ok {
+ *err = errors.New("YAML error: " + string(e))
+ } else {
+ panic(r)
+ }
+ }
+}
+
+// The Setter interface may be implemented by types to do their own custom
+// unmarshalling of YAML values, rather than being implicitly assigned by
+// the yaml package machinery. If setting the value works, the method should
+// return true. If it returns false, the value is considered unsupported
+// and is omitted from maps and slices.
+type Setter interface {
+ SetYAML(tag string, value interface{}) bool
+}
+
+// The Getter interface is implemented by types to do their own custom
+// marshalling into a YAML tag and value.
+type Getter interface {
+ GetYAML() (tag string, value interface{})
+}
+
+// Unmarshal decodes the first document found within the in byte slice
+// and assigns decoded values into the out value.
+//
+// Maps and pointers (to a struct, string, int, etc) are accepted as out
+// values. If an internal pointer within a struct is not initialized,
+// the yaml package will initialize it if necessary for unmarshalling
+// the provided data. The out parameter must not be nil.
+//
+// The type of the decoded values and the type of out will be considered,
+// and Unmarshal will do the best possible job to unmarshal values
+// appropriately. It is NOT considered an error, though, to skip values
+// because they are not available in the decoded YAML, or if they are not
+// compatible with the out value. To ensure something was properly
+// unmarshaled use a map or compare against the previous value for the
+// field (usually the zero value).
+//
+// Struct fields are only unmarshalled if they are exported (have an
+// upper case first letter), and are unmarshalled using the field name
+// lowercased as the default key. Custom keys may be defined via the
+// "yaml" name in the field tag: the content preceding the first comma
+// is used as the key, and the following comma-separated options are
+// used to tweak the marshalling process (see Marshal).
+// Conflicting names result in a runtime error.
+//
+// For example:
+//
+// type T struct {
+// F int `yaml:"a,omitempty"`
+// B int
+// }
+// var t T
+// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
+//
+// See the documentation of Marshal for the format of tags and a list of
+// supported tag options.
+//
+func Unmarshal(in []byte, out interface{}) (err error) {
+ defer handleErr(&err)
+ d := newDecoder()
+ p := newParser(in)
+ defer p.destroy()
+ node := p.parse()
+ if node != nil {
+ v := reflect.ValueOf(out)
+ if v.Kind() == reflect.Ptr && !v.IsNil() {
+ v = v.Elem()
+ }
+ d.unmarshal(node, v)
+ }
+ return nil
+}
+
+// Marshal serializes the value provided into a YAML document. The structure
+// of the generated document will reflect the structure of the value itself.
+// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
+//
+// Struct fields are only unmarshalled if they are exported (have an upper case
+// first letter), and are unmarshalled using the field name lowercased as the
+// default key. Custom keys may be defined via the "yaml" name in the field
+// tag: the content preceding the first comma is used as the key, and the
+// following comma-separated options are used to tweak the marshalling process.
+// Conflicting names result in a runtime error.
+//
+// The field tag format accepted is:
+//
+// `(...) yaml:"[][,[,]]" (...)`
+//
+// The following flags are currently supported:
+//
+// omitempty Only include the field if it's not set to the zero
+// value for the type or to empty slices or maps.
+// Does not apply to zero valued structs.
+//
+// flow Marshal using a flow style (useful for structs,
+// sequences and maps.
+//
+// inline Inline the struct it's applied to, so its fields
+// are processed as if they were part of the outer
+// struct.
+//
+// In addition, if the key is "-", the field is ignored.
+//
+// For example:
+//
+// type T struct {
+// F int "a,omitempty"
+// B int
+// }
+// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
+// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n"
+//
+func Marshal(in interface{}) (out []byte, err error) {
+ defer handleErr(&err)
+ e := newEncoder()
+ defer e.destroy()
+ e.marshal("", reflect.ValueOf(in))
+ e.finish()
+ out = e.out
+ return
+}
+
+// --------------------------------------------------------------------------
+// Maintain a mapping of keys to structure field indexes
+
+// The code in this section was copied from mgo/bson.
+
+// structInfo holds details for the serialization of fields of
+// a given struct.
+type structInfo struct {
+ FieldsMap map[string]fieldInfo
+ FieldsList []fieldInfo
+
+ // InlineMap is the number of the field in the struct that
+ // contains an ,inline map, or -1 if there's none.
+ InlineMap int
+}
+
+type fieldInfo struct {
+ Key string
+ Num int
+ OmitEmpty bool
+ Flow bool
+
+ // Inline holds the field index if the field is part of an inlined struct.
+ Inline []int
+}
+
+var structMap = make(map[reflect.Type]*structInfo)
+var fieldMapMutex sync.RWMutex
+
+func getStructInfo(st reflect.Type) (*structInfo, error) {
+ fieldMapMutex.RLock()
+ sinfo, found := structMap[st]
+ fieldMapMutex.RUnlock()
+ if found {
+ return sinfo, nil
+ }
+
+ n := st.NumField()
+ fieldsMap := make(map[string]fieldInfo)
+ fieldsList := make([]fieldInfo, 0, n)
+ inlineMap := -1
+ for i := 0; i != n; i++ {
+ field := st.Field(i)
+ if field.PkgPath != "" {
+ continue // Private field
+ }
+
+ info := fieldInfo{Num: i}
+
+ tag := field.Tag.Get("yaml")
+ if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
+ tag = string(field.Tag)
+ }
+ if tag == "-" {
+ continue
+ }
+
+ inline := false
+ fields := strings.Split(tag, ",")
+ if len(fields) > 1 {
+ for _, flag := range fields[1:] {
+ switch flag {
+ case "omitempty":
+ info.OmitEmpty = true
+ case "flow":
+ info.Flow = true
+ case "inline":
+ inline = true
+ default:
+ return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st))
+ }
+ }
+ tag = fields[0]
+ }
+
+ if inline {
+ switch field.Type.Kind() {
+ // TODO: Implement support for inline maps.
+ //case reflect.Map:
+ // if inlineMap >= 0 {
+ // return nil, errors.New("Multiple ,inline maps in struct " + st.String())
+ // }
+ // if field.Type.Key() != reflect.TypeOf("") {
+ // return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
+ // }
+ // inlineMap = info.Num
+ case reflect.Struct:
+ sinfo, err := getStructInfo(field.Type)
+ if err != nil {
+ return nil, err
+ }
+ for _, finfo := range sinfo.FieldsList {
+ if _, found := fieldsMap[finfo.Key]; found {
+ msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
+ return nil, errors.New(msg)
+ }
+ if finfo.Inline == nil {
+ finfo.Inline = []int{i, finfo.Num}
+ } else {
+ finfo.Inline = append([]int{i}, finfo.Inline...)
+ }
+ fieldsMap[finfo.Key] = finfo
+ fieldsList = append(fieldsList, finfo)
+ }
+ default:
+ //return nil, errors.New("Option ,inline needs a struct value or map field")
+ return nil, errors.New("Option ,inline needs a struct value field")
+ }
+ continue
+ }
+
+ if tag != "" {
+ info.Key = tag
+ } else {
+ info.Key = strings.ToLower(field.Name)
+ }
+
+ if _, found = fieldsMap[info.Key]; found {
+ msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
+ return nil, errors.New(msg)
+ }
+
+ fieldsList = append(fieldsList, info)
+ fieldsMap[info.Key] = info
+ }
+
+ sinfo = &structInfo{fieldsMap, fieldsList, inlineMap}
+
+ fieldMapMutex.Lock()
+ structMap[st] = sinfo
+ fieldMapMutex.Unlock()
+ return sinfo, nil
+}
+
+func isZero(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.String:
+ return len(v.String()) == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ case reflect.Slice:
+ return v.Len() == 0
+ case reflect.Map:
+ return v.Len() == 0
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ }
+ return false
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yamlh.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yamlh.go
new file mode 100644
index 00000000..4b020b1b
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yamlh.go
@@ -0,0 +1,716 @@
+package yaml
+
+import (
+ "io"
+)
+
+// The version directive data.
+type yaml_version_directive_t struct {
+ major int8 // The major version number.
+ minor int8 // The minor version number.
+}
+
+// The tag directive data.
+type yaml_tag_directive_t struct {
+ handle []byte // The tag handle.
+ prefix []byte // The tag prefix.
+}
+
+type yaml_encoding_t int
+
+// The stream encoding.
+const (
+ // Let the parser choose the encoding.
+ yaml_ANY_ENCODING yaml_encoding_t = iota
+
+ yaml_UTF8_ENCODING // The default UTF-8 encoding.
+ yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
+ yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
+)
+
+type yaml_break_t int
+
+// Line break types.
+const (
+ // Let the parser choose the break type.
+ yaml_ANY_BREAK yaml_break_t = iota
+
+ yaml_CR_BREAK // Use CR for line breaks (Mac style).
+ yaml_LN_BREAK // Use LN for line breaks (Unix style).
+ yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
+)
+
+type yaml_error_type_t int
+
+// Many bad things could happen with the parser and emitter.
+const (
+ // No error is produced.
+ yaml_NO_ERROR yaml_error_type_t = iota
+
+ yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory.
+ yaml_READER_ERROR // Cannot read or decode the input stream.
+ yaml_SCANNER_ERROR // Cannot scan the input stream.
+ yaml_PARSER_ERROR // Cannot parse the input stream.
+ yaml_COMPOSER_ERROR // Cannot compose a YAML document.
+ yaml_WRITER_ERROR // Cannot write to the output stream.
+ yaml_EMITTER_ERROR // Cannot emit a YAML stream.
+)
+
+// The pointer position.
+type yaml_mark_t struct {
+ index int // The position index.
+ line int // The position line.
+ column int // The position column.
+}
+
+// Node Styles
+
+type yaml_style_t int8
+
+type yaml_scalar_style_t yaml_style_t
+
+// Scalar styles.
+const (
+ // Let the emitter choose the style.
+ yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota
+
+ yaml_PLAIN_SCALAR_STYLE // The plain scalar style.
+ yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
+ yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
+ yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
+ yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
+)
+
+type yaml_sequence_style_t yaml_style_t
+
+// Sequence styles.
+const (
+ // Let the emitter choose the style.
+ yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota
+
+ yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
+ yaml_FLOW_SEQUENCE_STYLE // The flow sequence style.
+)
+
+type yaml_mapping_style_t yaml_style_t
+
+// Mapping styles.
+const (
+ // Let the emitter choose the style.
+ yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota
+
+ yaml_BLOCK_MAPPING_STYLE // The block mapping style.
+ yaml_FLOW_MAPPING_STYLE // The flow mapping style.
+)
+
+// Tokens
+
+type yaml_token_type_t int
+
+// Token types.
+const (
+ // An empty token.
+ yaml_NO_TOKEN yaml_token_type_t = iota
+
+ yaml_STREAM_START_TOKEN // A STREAM-START token.
+ yaml_STREAM_END_TOKEN // A STREAM-END token.
+
+ yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
+ yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token.
+ yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token.
+ yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token.
+
+ yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
+ yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-SEQUENCE-END token.
+ yaml_BLOCK_END_TOKEN // A BLOCK-END token.
+
+ yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
+ yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token.
+ yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token.
+ yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token.
+
+ yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
+ yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token.
+ yaml_KEY_TOKEN // A KEY token.
+ yaml_VALUE_TOKEN // A VALUE token.
+
+ yaml_ALIAS_TOKEN // An ALIAS token.
+ yaml_ANCHOR_TOKEN // An ANCHOR token.
+ yaml_TAG_TOKEN // A TAG token.
+ yaml_SCALAR_TOKEN // A SCALAR token.
+)
+
+func (tt yaml_token_type_t) String() string {
+ switch tt {
+ case yaml_NO_TOKEN:
+ return "yaml_NO_TOKEN"
+ case yaml_STREAM_START_TOKEN:
+ return "yaml_STREAM_START_TOKEN"
+ case yaml_STREAM_END_TOKEN:
+ return "yaml_STREAM_END_TOKEN"
+ case yaml_VERSION_DIRECTIVE_TOKEN:
+ return "yaml_VERSION_DIRECTIVE_TOKEN"
+ case yaml_TAG_DIRECTIVE_TOKEN:
+ return "yaml_TAG_DIRECTIVE_TOKEN"
+ case yaml_DOCUMENT_START_TOKEN:
+ return "yaml_DOCUMENT_START_TOKEN"
+ case yaml_DOCUMENT_END_TOKEN:
+ return "yaml_DOCUMENT_END_TOKEN"
+ case yaml_BLOCK_SEQUENCE_START_TOKEN:
+ return "yaml_BLOCK_SEQUENCE_START_TOKEN"
+ case yaml_BLOCK_MAPPING_START_TOKEN:
+ return "yaml_BLOCK_MAPPING_START_TOKEN"
+ case yaml_BLOCK_END_TOKEN:
+ return "yaml_BLOCK_END_TOKEN"
+ case yaml_FLOW_SEQUENCE_START_TOKEN:
+ return "yaml_FLOW_SEQUENCE_START_TOKEN"
+ case yaml_FLOW_SEQUENCE_END_TOKEN:
+ return "yaml_FLOW_SEQUENCE_END_TOKEN"
+ case yaml_FLOW_MAPPING_START_TOKEN:
+ return "yaml_FLOW_MAPPING_START_TOKEN"
+ case yaml_FLOW_MAPPING_END_TOKEN:
+ return "yaml_FLOW_MAPPING_END_TOKEN"
+ case yaml_BLOCK_ENTRY_TOKEN:
+ return "yaml_BLOCK_ENTRY_TOKEN"
+ case yaml_FLOW_ENTRY_TOKEN:
+ return "yaml_FLOW_ENTRY_TOKEN"
+ case yaml_KEY_TOKEN:
+ return "yaml_KEY_TOKEN"
+ case yaml_VALUE_TOKEN:
+ return "yaml_VALUE_TOKEN"
+ case yaml_ALIAS_TOKEN:
+ return "yaml_ALIAS_TOKEN"
+ case yaml_ANCHOR_TOKEN:
+ return "yaml_ANCHOR_TOKEN"
+ case yaml_TAG_TOKEN:
+ return "yaml_TAG_TOKEN"
+ case yaml_SCALAR_TOKEN:
+ return "yaml_SCALAR_TOKEN"
+ }
+ return ""
+}
+
+// The token structure.
+type yaml_token_t struct {
+ // The token type.
+ typ yaml_token_type_t
+
+ // The start/end of the token.
+ start_mark, end_mark yaml_mark_t
+
+ // The stream encoding (for yaml_STREAM_START_TOKEN).
+ encoding yaml_encoding_t
+
+ // The alias/anchor/scalar value or tag/tag directive handle
+ // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
+ value []byte
+
+ // The tag suffix (for yaml_TAG_TOKEN).
+ suffix []byte
+
+ // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
+ prefix []byte
+
+ // The scalar style (for yaml_SCALAR_TOKEN).
+ style yaml_scalar_style_t
+
+ // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
+ major, minor int8
+}
+
+// Events
+
+type yaml_event_type_t int8
+
+// Event types.
+const (
+ // An empty event.
+ yaml_NO_EVENT yaml_event_type_t = iota
+
+ yaml_STREAM_START_EVENT // A STREAM-START event.
+ yaml_STREAM_END_EVENT // A STREAM-END event.
+ yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
+ yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event.
+ yaml_ALIAS_EVENT // An ALIAS event.
+ yaml_SCALAR_EVENT // A SCALAR event.
+ yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
+ yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event.
+ yaml_MAPPING_START_EVENT // A MAPPING-START event.
+ yaml_MAPPING_END_EVENT // A MAPPING-END event.
+)
+
+// The event structure.
+type yaml_event_t struct {
+
+ // The event type.
+ typ yaml_event_type_t
+
+ // The start and end of the event.
+ start_mark, end_mark yaml_mark_t
+
+ // The document encoding (for yaml_STREAM_START_EVENT).
+ encoding yaml_encoding_t
+
+ // The version directive (for yaml_DOCUMENT_START_EVENT).
+ version_directive *yaml_version_directive_t
+
+ // The list of tag directives (for yaml_DOCUMENT_START_EVENT).
+ tag_directives []yaml_tag_directive_t
+
+ // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
+ anchor []byte
+
+ // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
+ tag []byte
+
+ // The scalar value (for yaml_SCALAR_EVENT).
+ value []byte
+
+ // Is the document start/end indicator implicit, or the tag optional?
+ // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
+ implicit bool
+
+ // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
+ quoted_implicit bool
+
+ // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
+ style yaml_style_t
+}
+
+func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) }
+func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
+func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) }
+
+// Nodes
+
+const (
+ yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null.
+ yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false.
+ yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values.
+ yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values.
+ yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values.
+ yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.
+
+ yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
+ yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping.
+
+ // Not in original libyaml.
+ yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
+ yaml_MERGE_TAG = "tag:yaml.org,2002:merge"
+
+ yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str.
+ yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
+ yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map.
+)
+
+type yaml_node_type_t int
+
+// Node types.
+const (
+ // An empty node.
+ yaml_NO_NODE yaml_node_type_t = iota
+
+ yaml_SCALAR_NODE // A scalar node.
+ yaml_SEQUENCE_NODE // A sequence node.
+ yaml_MAPPING_NODE // A mapping node.
+)
+
+// An element of a sequence node.
+type yaml_node_item_t int
+
+// An element of a mapping node.
+type yaml_node_pair_t struct {
+ key int // The key of the element.
+ value int // The value of the element.
+}
+
+// The node structure.
+type yaml_node_t struct {
+ typ yaml_node_type_t // The node type.
+ tag []byte // The node tag.
+
+ // The node data.
+
+ // The scalar parameters (for yaml_SCALAR_NODE).
+ scalar struct {
+ value []byte // The scalar value.
+ length int // The length of the scalar value.
+ style yaml_scalar_style_t // The scalar style.
+ }
+
+ // The sequence parameters (for YAML_SEQUENCE_NODE).
+ sequence struct {
+ items_data []yaml_node_item_t // The stack of sequence items.
+ style yaml_sequence_style_t // The sequence style.
+ }
+
+ // The mapping parameters (for yaml_MAPPING_NODE).
+ mapping struct {
+ pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value).
+ pairs_start *yaml_node_pair_t // The beginning of the stack.
+ pairs_end *yaml_node_pair_t // The end of the stack.
+ pairs_top *yaml_node_pair_t // The top of the stack.
+ style yaml_mapping_style_t // The mapping style.
+ }
+
+ start_mark yaml_mark_t // The beginning of the node.
+ end_mark yaml_mark_t // The end of the node.
+
+}
+
+// The document structure.
+type yaml_document_t struct {
+
+ // The document nodes.
+ nodes []yaml_node_t
+
+ // The version directive.
+ version_directive *yaml_version_directive_t
+
+ // The list of tag directives.
+ tag_directives_data []yaml_tag_directive_t
+ tag_directives_start int // The beginning of the tag directives list.
+ tag_directives_end int // The end of the tag directives list.
+
+ start_implicit int // Is the document start indicator implicit?
+ end_implicit int // Is the document end indicator implicit?
+
+ // The start/end of the document.
+ start_mark, end_mark yaml_mark_t
+}
+
+// The prototype of a read handler.
+//
+// The read handler is called when the parser needs to read more bytes from the
+// source. The handler should write not more than size bytes to the buffer.
+// The number of written bytes should be set to the size_read variable.
+//
+// [in,out] data A pointer to an application data specified by
+// yaml_parser_set_input().
+// [out] buffer The buffer to write the data from the source.
+// [in] size The size of the buffer.
+// [out] size_read The actual number of bytes read from the source.
+//
+// On success, the handler should return 1. If the handler failed,
+// the returned value should be 0. On EOF, the handler should set the
+// size_read to 0 and return 1.
+type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
+
+// This structure holds information about a potential simple key.
+type yaml_simple_key_t struct {
+ possible bool // Is a simple key possible?
+ required bool // Is a simple key required?
+ token_number int // The number of the token.
+ mark yaml_mark_t // The position mark.
+}
+
+// The states of the parser.
+type yaml_parser_state_t int
+
+const (
+ yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
+
+ yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document.
+ yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START.
+ yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document.
+ yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END.
+ yaml_PARSE_BLOCK_NODE_STATE // Expect a block node.
+ yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
+ yaml_PARSE_FLOW_NODE_STATE // Expect a flow node.
+ yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence.
+ yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence.
+ yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence.
+ yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
+ yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key.
+ yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value.
+ yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the and of an ordered mapping entry.
+ yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
+ yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
+ yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
+ yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping.
+ yaml_PARSE_END_STATE // Expect nothing.
+)
+
+func (ps yaml_parser_state_t) String() string {
+ switch ps {
+ case yaml_PARSE_STREAM_START_STATE:
+ return "yaml_PARSE_STREAM_START_STATE"
+ case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
+ return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
+ case yaml_PARSE_DOCUMENT_START_STATE:
+ return "yaml_PARSE_DOCUMENT_START_STATE"
+ case yaml_PARSE_DOCUMENT_CONTENT_STATE:
+ return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
+ case yaml_PARSE_DOCUMENT_END_STATE:
+ return "yaml_PARSE_DOCUMENT_END_STATE"
+ case yaml_PARSE_BLOCK_NODE_STATE:
+ return "yaml_PARSE_BLOCK_NODE_STATE"
+ case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
+ return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
+ case yaml_PARSE_FLOW_NODE_STATE:
+ return "yaml_PARSE_FLOW_NODE_STATE"
+ case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
+ return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
+ case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
+ return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
+ case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
+ return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
+ case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
+ return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
+ case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
+ return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
+ case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
+ return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
+ case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
+ return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
+ case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
+ return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
+ case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
+ return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
+ case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
+ return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
+ case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
+ return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
+ case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
+ return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
+ case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
+ return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
+ case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
+ return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
+ case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
+ return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
+ case yaml_PARSE_END_STATE:
+ return "yaml_PARSE_END_STATE"
+ }
+ return ""
+}
+
+// This structure holds aliases data.
+type yaml_alias_data_t struct {
+ anchor []byte // The anchor.
+ index int // The node id.
+ mark yaml_mark_t // The anchor mark.
+}
+
+// The parser structure.
+//
+// All members are internal. Manage the structure using the
+// yaml_parser_ family of functions.
+type yaml_parser_t struct {
+
+ // Error handling
+
+ error yaml_error_type_t // Error type.
+
+ problem string // Error description.
+
+ // The byte about which the problem occured.
+ problem_offset int
+ problem_value int
+ problem_mark yaml_mark_t
+
+ // The error context.
+ context string
+ context_mark yaml_mark_t
+
+ // Reader stuff
+
+ read_handler yaml_read_handler_t // Read handler.
+
+ input_file io.Reader // File input data.
+ input []byte // String input data.
+ input_pos int
+
+ eof bool // EOF flag
+
+ buffer []byte // The working buffer.
+ buffer_pos int // The current position of the buffer.
+
+ unread int // The number of unread characters in the buffer.
+
+ raw_buffer []byte // The raw buffer.
+ raw_buffer_pos int // The current position of the buffer.
+
+ encoding yaml_encoding_t // The input encoding.
+
+ offset int // The offset of the current position (in bytes).
+ mark yaml_mark_t // The mark of the current position.
+
+ // Scanner stuff
+
+ stream_start_produced bool // Have we started to scan the input stream?
+ stream_end_produced bool // Have we reached the end of the input stream?
+
+ flow_level int // The number of unclosed '[' and '{' indicators.
+
+ tokens []yaml_token_t // The tokens queue.
+ tokens_head int // The head of the tokens queue.
+ tokens_parsed int // The number of tokens fetched from the queue.
+ token_available bool // Does the tokens queue contain a token ready for dequeueing.
+
+ indent int // The current indentation level.
+ indents []int // The indentation levels stack.
+
+ simple_key_allowed bool // May a simple key occur at the current position?
+ simple_keys []yaml_simple_key_t // The stack of simple keys.
+
+ // Parser stuff
+
+ state yaml_parser_state_t // The current parser state.
+ states []yaml_parser_state_t // The parser states stack.
+ marks []yaml_mark_t // The stack of marks.
+ tag_directives []yaml_tag_directive_t // The list of TAG directives.
+
+ // Dumper stuff
+
+ aliases []yaml_alias_data_t // The alias data.
+
+ document *yaml_document_t // The currently parsed document.
+}
+
+// Emitter Definitions
+
+// The prototype of a write handler.
+//
+// The write handler is called when the emitter needs to flush the accumulated
+// characters to the output. The handler should write @a size bytes of the
+// @a buffer to the output.
+//
+// @param[in,out] data A pointer to an application data specified by
+// yaml_emitter_set_output().
+// @param[in] buffer The buffer with bytes to be written.
+// @param[in] size The size of the buffer.
+//
+// @returns On success, the handler should return @c 1. If the handler failed,
+// the returned value should be @c 0.
+//
+type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error
+
+type yaml_emitter_state_t int
+
+// The emitter states.
+const (
+ // Expect STREAM-START.
+ yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota
+
+ yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END.
+ yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END.
+ yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document.
+ yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END.
+ yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence.
+ yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence.
+ yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
+ yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
+ yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping.
+ yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
+ yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence.
+ yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence.
+ yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
+ yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping.
+ yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
+ yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping.
+ yaml_EMIT_END_STATE // Expect nothing.
+)
+
+// The emitter structure.
+//
+// All members are internal. Manage the structure using the @c yaml_emitter_
+// family of functions.
+type yaml_emitter_t struct {
+
+ // Error handling
+
+ error yaml_error_type_t // Error type.
+ problem string // Error description.
+
+ // Writer stuff
+
+ write_handler yaml_write_handler_t // Write handler.
+
+ output_buffer *[]byte // String output data.
+ output_file io.Writer // File output data.
+
+ buffer []byte // The working buffer.
+ buffer_pos int // The current position of the buffer.
+
+ raw_buffer []byte // The raw buffer.
+ raw_buffer_pos int // The current position of the buffer.
+
+ encoding yaml_encoding_t // The stream encoding.
+
+ // Emitter stuff
+
+ canonical bool // If the output is in the canonical style?
+ best_indent int // The number of indentation spaces.
+ best_width int // The preferred width of the output lines.
+ unicode bool // Allow unescaped non-ASCII characters?
+ line_break yaml_break_t // The preferred line break.
+
+ state yaml_emitter_state_t // The current emitter state.
+ states []yaml_emitter_state_t // The stack of states.
+
+ events []yaml_event_t // The event queue.
+ events_head int // The head of the event queue.
+
+ indents []int // The stack of indentation levels.
+
+ tag_directives []yaml_tag_directive_t // The list of tag directives.
+
+ indent int // The current indentation level.
+
+ flow_level int // The current flow level.
+
+ root_context bool // Is it the document root context?
+ sequence_context bool // Is it a sequence context?
+ mapping_context bool // Is it a mapping context?
+ simple_key_context bool // Is it a simple mapping key context?
+
+ line int // The current line.
+ column int // The current column.
+ whitespace bool // If the last character was a whitespace?
+ indention bool // If the last character was an indentation character (' ', '-', '?', ':')?
+ open_ended bool // If an explicit document end is required?
+
+ // Anchor analysis.
+ anchor_data struct {
+ anchor []byte // The anchor value.
+ alias bool // Is it an alias?
+ }
+
+ // Tag analysis.
+ tag_data struct {
+ handle []byte // The tag handle.
+ suffix []byte // The tag suffix.
+ }
+
+ // Scalar analysis.
+ scalar_data struct {
+ value []byte // The scalar value.
+ multiline bool // Does the scalar contain line breaks?
+ flow_plain_allowed bool // Can the scalar be expessed in the flow plain style?
+ block_plain_allowed bool // Can the scalar be expressed in the block plain style?
+ single_quoted_allowed bool // Can the scalar be expressed in the single quoted style?
+ block_allowed bool // Can the scalar be expressed in the literal or folded styles?
+ style yaml_scalar_style_t // The output style.
+ }
+
+ // Dumper stuff
+
+ opened bool // If the stream was already opened?
+ closed bool // If the stream was already closed?
+
+ // The information associated with the document nodes.
+ anchors *struct {
+ references int // The number of references.
+ anchor int // The anchor id.
+ serialized bool // If the node has been emitted?
+ }
+
+ last_anchor_id int // The last assigned anchor id.
+
+ document *yaml_document_t // The currently emitted document.
+}
diff --git a/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yamlprivateh.go b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yamlprivateh.go
new file mode 100644
index 00000000..8110ce3c
--- /dev/null
+++ b/go/src/github.com/bitrise-core/bitrise-init/vendor/gopkg.in/yaml.v1/yamlprivateh.go
@@ -0,0 +1,173 @@
+package yaml
+
const (
	// The size of the input raw buffer.
	input_raw_buffer_size = 512

	// The size of the input buffer.
	// It should be possible to decode the whole raw buffer.
	// (The factor of 3 presumably covers worst-case expansion when
	// transcoding the raw input encoding to UTF-8 — confirm against
	// the reader implementation.)
	input_buffer_size = input_raw_buffer_size * 3

	// The size of the output buffer.
	output_buffer_size = 128

	// The size of the output raw buffer.
	// It should be possible to encode the whole output buffer.
	output_raw_buffer_size = (output_buffer_size*2 + 2)

	// The initial size of other stacks and queues; they grow on demand.
	initial_stack_size  = 16
	initial_queue_size  = 16
	initial_string_size = 16
)
+
// Check if the character at the specified position is an alphabetical
// character, a digit, '_', or '-'.
func is_alpha(b []byte, i int) bool {
	c := b[i]
	if '0' <= c && c <= '9' {
		return true
	}
	if 'A' <= c && c <= 'Z' {
		return true
	}
	if 'a' <= c && c <= 'z' {
		return true
	}
	return c == '_' || c == '-'
}
+
// Check if the character at the specified position is a digit.
func is_digit(b []byte, i int) bool {
	c := b[i]
	return '0' <= c && c <= '9'
}
+
// Get the value of a digit.
// Assumes the caller already checked is_digit; other bytes yield
// a meaningless (possibly negative) result, as in the original.
func as_digit(b []byte, i int) int {
	digit := int(b[i])
	return digit - '0'
}
+
// Check if the character at the specified position is a hex-digit.
func is_hex(b []byte, i int) bool {
	c := b[i]
	if '0' <= c && c <= '9' {
		return true
	}
	if 'A' <= c && c <= 'F' {
		return true
	}
	return 'a' <= c && c <= 'f'
}
+
// Get the value of a hex-digit.
// Assumes the caller already checked is_hex; other bytes fall through
// to the digit branch and yield a meaningless result, as in the original.
func as_hex(b []byte, i int) int {
	c := b[i]
	switch {
	case c >= 'A' && c <= 'F':
		return int(c) - 'A' + 10
	case c >= 'a' && c <= 'f':
		return int(c) - 'a' + 10
	default:
		return int(c) - '0'
	}
}
+
// Check if the character is ASCII (a single-byte UTF-8 sequence).
func is_ascii(b []byte, i int) bool {
	return b[i] < 0x80
}
+
// Check if the character at the start of the buffer can be printed unescaped.
//
// Accepts: LF (#x0A); the printable ASCII range #x20..#x7E; and multi-byte
// UTF-8 sequences covering #xA0..#xD7FF and #xE000..#xFFFD, excluding the
// BOM #xFEFF (bytes EF BB BF) and the non-characters #xFFFE/#xFFFF
// (bytes EF BF BE / EF BF BF).
//
// May read up to two bytes past position i for multi-byte leading bytes,
// so the buffer must contain the complete UTF-8 sequence starting at i.
func is_printable(b []byte, i int) bool {
	return ((b[i] == 0x0A) || // . == #x0A
		(b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
		(b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF
		(b[i] > 0xC2 && b[i] < 0xED) ||
		(b[i] == 0xED && b[i+1] < 0xA0) ||
		(b[i] == 0xEE) ||
		(b[i] == 0xEF && // #xE000 <= . <= #xFFFD
			!(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
			!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
}
+
// Check if the character at the specified position is NUL.
func is_z(b []byte, i int) bool {
	return b[i] == '\x00'
}
+
// Check if the beginning of the buffer is a BOM (UTF-8 #xFEFF, encoded
// as the bytes EF BB BF).
//
// NOTE(review): the position argument i is ignored — the check always
// inspects b[0..2]. This mirrors the upstream libyaml implementation;
// presumably every caller invokes it only at the start of the buffer —
// confirm at the call sites before relying on i.
func is_bom(b []byte, i int) bool {
	return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
}
+
// Check if the character at the specified position is space.
func is_space(b []byte, i int) bool {
	return b[i] == 0x20 // ' '
}
+
// Check if the character at the specified position is tab.
func is_tab(b []byte, i int) bool {
	return b[i] == 0x09 // '\t'
}
+
// Check if the character at the specified position is blank (space or tab).
// Inlined rather than calling is_space/is_tab, as elsewhere in this file.
func is_blank(b []byte, i int) bool {
	c := b[i]
	return c == ' ' || c == '\t'
}
+
// Check if the character at the specified position is a line break:
// CR (#xD), LF (#xA), NEL (#x85), LS (#x2028), or PS (#x2029).
// For the multi-byte breaks, continuation bytes are read only after the
// leading byte matches, so no lookahead happens on non-matching input.
func is_break(b []byte, i int) bool {
	c := b[i]
	return c == '\r' || c == '\n' ||
		(c == 0xC2 && b[i+1] == 0x85) || // NEL
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS / PS
}
+
// Check if the two characters at the specified position are a CR LF pair.
// b[i+1] is only read when b[i] is CR, as in the original.
func is_crlf(b []byte, i int) bool {
	if b[i] != '\r' {
		return false
	}
	return b[i+1] == '\n'
}
+
// Check if the character is a line break or NUL.
// The break checks are inlined (not delegated to is_break/is_z), keeping
// the file's convention for these hot helpers.
func is_breakz(b []byte, i int) bool {
	c := b[i]
	return c == 0 || // NUL
		c == '\r' || c == '\n' || // CR (#xD), LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}
+
// Check if the character is a line break, space, or NUL.
// The checks are inlined (not delegated to is_space/is_breakz), keeping
// the file's convention for these hot helpers.
func is_spacez(b []byte, i int) bool {
	c := b[i]
	return c == ' ' ||
		c == 0 || // NUL
		c == '\r' || c == '\n' || // CR (#xD), LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}
+
// Check if the character is a line break, space, tab, or NUL.
// The checks are inlined (not delegated to is_blank/is_breakz), keeping
// the file's convention for these hot helpers.
func is_blankz(b []byte, i int) bool {
	c := b[i]
	return c == ' ' || c == '\t' ||
		c == 0 || // NUL
		c == '\r' || c == '\n' || // CR (#xD), LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}
+
// Determine the width of the character.
// Returns the byte length (1-4) of the UTF-8 sequence whose leading byte
// is b, or 0 when b is not a valid leading byte (a 10xxxxxx continuation
// byte or an invalid 11111xxx prefix).
func width(b byte) int {
	// Don't replace these by a switch without first
	// confirming that it is being inlined.
	if b&0x80 == 0x00 {
		return 1
	}
	if b&0xE0 == 0xC0 {
		return 2
	}
	if b&0xF0 == 0xE0 {
		return 3
	}
	if b&0xF8 == 0xF0 {
		return 4
	}
	return 0

}