mirror of
https://github.com/linuxserver/core.git
synced 2026-02-20 05:07:19 +08:00
Merge remote-tracking branch 'origin/master' into ask-on-signup
Conflicts: node_modules/oldclient/plugins-server/c9.api.auth/auth-plugin.js node_modules/oldclient/plugins-server/c9.api.auth/auth.js
This commit is contained in:
commit
253f64e3f9
100
b9/b9
Executable file
100
b9/b9
Executable file
@ -0,0 +1,100 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPT_NAME=$BASH_SOURCE
|
||||
if [ -h $SCRIPT_NAME ]; then SCRIPT_NAME=$(readlink $SCRIPT_NAME); fi
|
||||
|
||||
cd $(dirname $SCRIPT_NAME)
|
||||
|
||||
B9_DIR=$(pwd)
|
||||
C9_DIR=$B9_DIR/..
|
||||
B9=$B9_DIR/b9
|
||||
|
||||
source ./lib/_init.sh
|
||||
|
||||
NODEJS=$(_b9_init_nodejs)
|
||||
NPM=$(_b9_init_npm)
|
||||
|
||||
TMPDIR=$(_b9_init_temp)
|
||||
TMP=$TMPDIR
|
||||
TEMP=$TMPDIR
|
||||
|
||||
DEBUG=""
|
||||
|
||||
for MODULE in ./lib/*.sh; do
|
||||
[ $MODULE == "./lib/_init.sh" ] && continue
|
||||
source $MODULE
|
||||
done
|
||||
|
||||
# Print top-level usage for the b9 tool and abort with status 1.
usage() {
    cat <<EOF
Usage: $B9 [global options ...] COMMAND [commands options...]

Cloud9 build tool

Global options:
 --help show this help message
 --debug trace bash commands

Commands:

[Deployment]
 package Package and upload version of Cloud9
 deploy Deploy a Cloud9 version

[Internal]
 check Run b9 tests
 exec COMMAND [ARGS] Run arbitrary b9 function

EOF
    exit 1
}
|
||||
|
||||
for ARG in "$@"; do
|
||||
case $ARG in
|
||||
--help|-h)
|
||||
usage
|
||||
;;
|
||||
--debug)
|
||||
DEBUG="--debug"
|
||||
B9="$B9 --debug"
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
break
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
ORIGINAL_COMMAND=$1
|
||||
case $ORIGINAL_COMMAND in
|
||||
package)
|
||||
COMMAND=b9_package
|
||||
;;
|
||||
deploy)
|
||||
COMMAND=b9_deploy
|
||||
;;
|
||||
prepare)
|
||||
COMMAND=b9_prepare
|
||||
;;
|
||||
check)
|
||||
COMMAND=b9_check
|
||||
;;
|
||||
exec) # for debugging only!
|
||||
shift
|
||||
COMMAND=$1
|
||||
;;
|
||||
"")
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echo "Invalid command. See $B9 --help for usage."
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
|
||||
if [ "$DEBUG" ]; then
|
||||
set -x
|
||||
fi
|
||||
|
||||
$COMMAND "$@"
|
||||
21
b9/containers/c9/Dockerfile
Normal file
21
b9/containers/c9/Dockerfile
Normal file
@ -0,0 +1,21 @@
|
||||
FROM debian:8.2
|
||||
|
||||
MAINTAINER Cloud9 IDE, Inc. <info@c9.io>
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
# ubuntu user
|
||||
RUN useradd --uid 1000 --shell /bin/bash -m --home-dir /home/ubuntu ubuntu && \
|
||||
chmod 777 /tmp
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y curl openssh-client rsync && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
# install nodejs
|
||||
RUN curl https://nodejs.org/dist/v0.10.41/node-v0.10.41-linux-x64.tar.gz | tar xvzf - -C /usr/local --strip-components=1
|
||||
|
||||
USER ubuntu
|
||||
|
||||
EXPOSE 8080
|
||||
WORKDIR /home/ubuntu/newclient
|
||||
51
b9/containers/newclient/Dockerfile
Normal file
51
b9/containers/newclient/Dockerfile
Normal file
@ -0,0 +1,51 @@
|
||||
FROM debian:8.2
|
||||
|
||||
MAINTAINER Cloud9 IDE, Inc. <info@c9.io>
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y curl
|
||||
|
||||
# ubuntu user
|
||||
RUN useradd --uid 1000 --shell /bin/bash -m --home-dir /home/ubuntu ubuntu && \
|
||||
chmod 777 /tmp
|
||||
|
||||
# make ubuntu user sudo
|
||||
RUN apt-get install -y sudo && \
|
||||
sed -i 's/%sudo\s.*/%sudo ALL=NOPASSWD:ALL/' /etc/sudoers && \
|
||||
usermod -a -G sudo ubuntu
|
||||
|
||||
RUN chown root:root /usr/bin/sudo && chmod 4755 /usr/bin/sudo
|
||||
|
||||
# install nodejs
|
||||
RUN mkdir /nodejs && curl https://nodejs.org/dist/v0.10.40/node-v0.10.40-linux-x64.tar.gz | tar xvzf - -C /usr/local --strip-components=1
|
||||
RUN npm install -g npm@2.14.11
|
||||
|
||||
# oldclient
|
||||
RUN apt-get install -y openssh-client
|
||||
|
||||
# test runner
|
||||
RUN npm install -g mocha
|
||||
|
||||
# install jsonalzyer dependencies
|
||||
RUN apt-get install -y golang tmux python python-pip pylint php5 ruby build-essential
|
||||
|
||||
# test runner
|
||||
RUN apt-get install -y redis-server
|
||||
|
||||
# for odev
|
||||
RUN apt-get install -y haproxy
|
||||
|
||||
# for CI
|
||||
RUN apt-get install -y git
|
||||
|
||||
ADD files/ssh_config /home/ubuntu/.ssh/config
|
||||
|
||||
USER ubuntu
|
||||
|
||||
# Cloud9 installer
|
||||
RUN curl -L https://raw.githubusercontent.com/c9/install/master/install.sh | bash
|
||||
|
||||
EXPOSE 8080
|
||||
VOLUME /home/ubuntu/newclient
|
||||
WORKDIR /home/ubuntu/newclient
|
||||
14
b9/containers/newclient/files/ssh_config
Normal file
14
b9/containers/newclient/files/ssh_config
Normal file
@ -0,0 +1,14 @@
|
||||
Host github.com
|
||||
User git
|
||||
Port 22
|
||||
Hostname github.com
|
||||
IdentityFile /home/ubuntu/.ssh/id_rsa_deploy
|
||||
TCPKeepAlive yes
|
||||
IdentitiesOnly yes
|
||||
StrictHostKeyChecking no
|
||||
|
||||
Host static.c9.io
|
||||
IdentityFile /home/ubuntu/.ssh/id_rsa_deploy
|
||||
StrictHostKeyChecking no
|
||||
TCPKeepAlive yes
|
||||
IdentitiesOnly yes
|
||||
17
b9/lib/_docker.sh
Normal file
17
b9/lib/_docker.sh
Normal file
@ -0,0 +1,17 @@
|
||||
_DO_NEWCLIENT_IMAGE=
|
||||
_b9_get_newclient_image() {
|
||||
if [ ! -z "$_DO_NEWCLIENT_IMAGE" ]; then
|
||||
echo $_DO_NEWCLIENT_IMAGE
|
||||
return
|
||||
fi
|
||||
|
||||
local RESULT=$(docker build -t newclient --rm $B9_DIR/containers/newclient)
|
||||
if [[ $(echo "$RESULT" | tail -n1) =~ Successfully\ built ]]; then
|
||||
_DO_NEWCLIENT_IMAGE=$(echo "$RESULT" | tail -n1 | awk '{print $3}')
|
||||
echo $_DO_NEWCLIENT_IMAGE
|
||||
return
|
||||
fi
|
||||
|
||||
echo $RESULT
|
||||
return 1
|
||||
}
|
||||
15
b9/lib/_git.sh
Normal file
15
b9/lib/_git.sh
Normal file
@ -0,0 +1,15 @@
|
||||
_b9_git_get_hash() {
|
||||
pushd $C9_DIR &> /dev/null
|
||||
|
||||
git rev-parse HEAD
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_b9_git_get_hash_short() {
|
||||
pushd $C9_DIR &> /dev/null
|
||||
|
||||
git rev-parse --short=10 HEAD
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
64
b9/lib/_init.sh
Normal file
64
b9/lib/_init.sh
Normal file
@ -0,0 +1,64 @@
|
||||
# Locate (and create) a per-user temp directory, preferring the docker tmp
# volume over /tmp. Prints the chosen directory on stdout; exits 1 when no
# candidate is usable.
_b9_init_temp() {
    local TMPDIR
    local UNAME=$(id -n -u)

    for TMPDIR in /var/lib/docker/tmp /tmp; do
        TMPDIR=$TMPDIR/$UNAME
        # Quoted so a path containing whitespace cannot be word-split.
        mkdir -p "$TMPDIR" &> /dev/null && break
        TMPDIR=""
    done

    if [ -z "$TMPDIR" ]; then
        echo "Can't find temp dir" 1>&2
        exit 1
    fi

    # Smoke-test writability: under `set -e` a failing mktemp aborts loudly
    # here instead of surfacing later in an obscure place.
    rm "$(mktemp --tmpdir="$TMPDIR")"

    # Quoted so the result is not subject to globbing/splitting on output.
    echo "$TMPDIR"
}
|
||||
|
||||
# Find a usable node executable. Sources nvm first (when installed) so an
# nvm-managed node takes precedence over system installs. Prints the path
# on stdout; exits 1 when nothing executable is found.
_b9_init_nodejs() {
    local NODEJS

    . ~/.nvm/nvm.sh &> /dev/null || :
    for NODEJS in $(which node) $(which nodejs) /usr/local/bin/node /usr/bin/nodejs; do
        # Quoted: an unquoted empty candidate degenerates to `[ -x ]`,
        # which is TRUE (one-argument string test) and would be accepted.
        [ -x "$NODEJS" ] && break
        NODEJS=""
    done

    if [ -z "$NODEJS" ]; then
        echo "Can't find node executable" 1>&2
        exit 1
    fi

    echo "$NODEJS"
}
|
||||
|
||||
# Find a usable npm executable and print its path; exits 1 when not found.
_b9_init_npm() {
    local NPM

    for NPM in $(which npm) /usr/local/bin/npm /usr/bin/npm; do
        # Quoted: an unquoted empty candidate degenerates to `[ -x ]`,
        # which is TRUE (one-argument string test) and would be accepted.
        [ -x "$NPM" ] && break
        NPM=""
    done

    if [ -z "$NPM" ]; then
        echo "Can't find npm executable" 1>&2
        exit 1
    fi

    echo "$NPM"
}
|
||||
|
||||
_B9_NODE_HELPER_INITIALIZED=0
|
||||
|
||||
_b9_init_node_helper() {
|
||||
[ "$_B9_NODE_HELPER_INITIALIZED" == "1" ] && return
|
||||
_B9_NODE_HELPER_INITIALIZED=1
|
||||
|
||||
pushd $B9_DIR/lib/js &> /dev/null
|
||||
rm -rf node_modules
|
||||
$NPM install
|
||||
popd &> /dev/null
|
||||
}
|
||||
8
b9/lib/_npm.sh
Normal file
8
b9/lib/_npm.sh
Normal file
@ -0,0 +1,8 @@
|
||||
_b9_npm() {
|
||||
local WORKDIR=$1
|
||||
shift
|
||||
docker run --rm -w /home/ubuntu/newclient -v $WORKDIR:/home/ubuntu/newclient -v $HOME/.ssh/id_rsa_deploy:/home/ubuntu/.ssh/id_rsa_deploy:ro --sig-proxy -a STDIN -a STDOUT -a STDERR $(_b9_get_newclient_image) npm "$@"
|
||||
# pushd $WORKDIR
|
||||
# npm "$@"
|
||||
# popd
|
||||
}
|
||||
6
b9/lib/check.sh
Normal file
6
b9/lib/check.sh
Normal file
@ -0,0 +1,6 @@
|
||||
source $C9_DIR/plugins/c9.docker/d9/_testing.sh
|
||||
|
||||
b9_check() {
|
||||
echo "Running B9 tests"
|
||||
_do_check_package
|
||||
}
|
||||
245
b9/lib/deploy.sh
Normal file
245
b9/lib/deploy.sh
Normal file
@ -0,0 +1,245 @@
|
||||
# Print usage for `b9 deploy` and abort with status 1.
# Fix: help text said "server patter" instead of "server pattern".
b9_deploy_usage() {
    echo "Usage: $B9 deploy SERVICES TREEISH SERVER_PATTERN [ARG...]"
    echo
    echo "Deploy a Cloud9 version"
    echo
    echo "Options:"
    echo " --settings=[all|beta|deploy|onlinedev] (default: all)"
    echo " --strategy=[slow_start|parallel|serial] Deploy strategy to use (default: slow_start)"
    echo " --regex Interpret server pattern as regular expression"
    echo " --no-check skip the health check"
    exit 1
}
|
||||
|
||||
b9_deploy() {
|
||||
[ "$1" == "--help" ] && b9_deploy_usage
|
||||
|
||||
local SERVICES=$1 && shift
|
||||
local TREEISH=$1 && shift
|
||||
local SERVER_PATTERN=$1 && shift
|
||||
|
||||
local SETTINGS=devel
|
||||
local DRY_RUN=""
|
||||
local ASSET="gcs"
|
||||
local USE_REGEX=""
|
||||
local NO_CHECK=""
|
||||
local TYPE=newclient
|
||||
local STRATEGY=slow_start
|
||||
|
||||
[ -z "$SERVICES" ] && b9_deploy_usage
|
||||
[ -z "$TREEISH" ] && b9_deploy_usage
|
||||
[ -z "$SERVER_PATTERN" ] && b9_deploy_usage
|
||||
|
||||
local ARG
|
||||
for ARG in "$@"; do
|
||||
case $ARG in
|
||||
--settings=*)
|
||||
SETTINGS="${ARG#*=}"
|
||||
shift
|
||||
;;
|
||||
--strategy=*)
|
||||
STRATEGY="${ARG#*=}"
|
||||
shift
|
||||
;;
|
||||
--docker)
|
||||
ASSET="docker"
|
||||
shift
|
||||
;;
|
||||
--no-check)
|
||||
NO_CHECK="--no-check"
|
||||
shift
|
||||
;;
|
||||
--regex)
|
||||
USE_REGEX="--regex"
|
||||
shift
|
||||
;;
|
||||
--dry-run)
|
||||
DRY_RUN="1"
|
||||
shift
|
||||
;;
|
||||
--help)
|
||||
b9_deploy_usage
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
b9_deploy_usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
[ "$SERVICES" == "docker" ] && TYPE=docker
|
||||
|
||||
local SERVER_LIST
|
||||
local VERSION
|
||||
|
||||
local TMPFILE=$(tempfile)
|
||||
b9_package $TREEISH --settings=$SETTINGS --type=$TYPE | tee $TMPFILE
|
||||
VERSION=$(cat $TMPFILE | tail -n1)
|
||||
rm $TMPFILE
|
||||
|
||||
SERVER_LIST="$(_b9_deploy_server_list $SERVER_PATTERN $USE_REGEX)"
|
||||
local CMD="$B9 exec _b9_deploy_one_from_${ASSET} $VERSION $SERVICES $SETTINGS $NO_CHECK"
|
||||
if [ "$DRY_RUN" == "1" ]; then
|
||||
CMD="echo $CMD"
|
||||
fi
|
||||
|
||||
_b9_deploy_release_event "$SERVICES" $SETTINGS $VERSION $SERVER_PATTERN
|
||||
_b9_deploy_strategy_${STRATEGY} "$SERVER_LIST" "$CMD"
|
||||
}
|
||||
|
||||
_b9_deploy_strategy_slow_start() {
|
||||
local SERVER_LIST=$1
|
||||
local CMD=$2
|
||||
|
||||
# first one
|
||||
$CMD $(echo "$SERVER_LIST" | head -n1)
|
||||
|
||||
# then two
|
||||
echo "$SERVER_LIST" | tail -n +2 | head -n2 | parallel --halt 2 $CMD
|
||||
|
||||
# then the rest
|
||||
echo "$SERVER_LIST" | tail -n +4 | parallel --halt 2 -j 15 $CMD
|
||||
}
|
||||
|
||||
_b9_deploy_strategy_parallel() {
|
||||
local SERVER_LIST=$1
|
||||
local CMD=$2
|
||||
|
||||
# first one
|
||||
$CMD $(echo "$SERVER_LIST" | head -n1)
|
||||
|
||||
# then the rest
|
||||
echo "$SERVER_LIST" | tail -n +2 | parallel --halt 2 -j 30 $CMD
|
||||
}
|
||||
|
||||
_b9_deploy_strategy_serial() {
|
||||
local SERVER_LIST=$1
|
||||
local CMD=$2
|
||||
|
||||
echo "$SERVER_LIST" | xargs -n1 $CMD
|
||||
}
|
||||
|
||||
_b9_deploy_server_list () {
|
||||
local SERVER_PATTERN=$1
|
||||
local USE_REGEX=$2
|
||||
$C9_DIR/scripts/gssh --no-cache $USE_REGEX --print-names "$SERVER_PATTERN" | shuf
|
||||
}
|
||||
|
||||
# Deploy one server from a GCS-hosted package.
#
# Invoked (via `b9 exec`) as:
#   _b9_deploy_one_from_gcs VERSION SERVICES SETTINGS [--no-check] SERVER
# The deploy driver builds the command with the optional --no-check flag
# baked in, and parallel/xargs append SERVER as the LAST argument — so
# argument 4 is either the flag or the server. The previous fixed-position
# parsing (SERVER=$4, NO_CHECK=$5) treated "--no-check" as the server name
# whenever the flag was passed.
_b9_deploy_one_from_gcs() {
    local VERSION=$1
    local SERVICES=$2
    local SETTINGS=$3
    local NO_CHECK=""
    local SERVER

    if [ "$4" == "--no-check" ]; then
        NO_CHECK=$4
        SERVER=$5
    else
        SERVER=$4
    fi

    echo Deploying $VERSION \($SERVICES\) to $SERVER ... >&2

    _b9_deploy_upload_from_gcs $VERSION $SERVER
    _b9_deploy_update_services $VERSION $SERVICES $SERVER $SETTINGS
    [ -z "$NO_CHECK" ] && _b9_deploy_check $SERVER $SERVICES $SETTINGS

    echo Deployed $VERSION to $SERVER >&2
}
|
||||
|
||||
_b9_deploy_upload_from_gcs() {
|
||||
local VERSION=$1
|
||||
local SERVER=$2
|
||||
|
||||
local TGZ
|
||||
TGZ=$(_d9_package_download_gcs $VERSION)
|
||||
|
||||
local VERSIONS_DIR="/home/ubuntu/versions"
|
||||
local TARGET_FILE=${VERSIONS_DIR}/$(basename $TGZ)
|
||||
local TARGET_DIR=${VERSIONS_DIR}/$(basename $TGZ ".tar.xz")
|
||||
|
||||
|
||||
_b9_deploy_ssh $SERVER "rm -rf $TARGET_DIR $TARGET_FILE; mkdir -p /home/ubuntu/versions/history"
|
||||
_b9_deploy_scp $TGZ $SERVER:$TARGET_FILE
|
||||
_b9_deploy_ssh $SERVER "cd /home/ubuntu/versions && tar xf $TARGET_FILE && rm $TARGET_FILE"
|
||||
}
|
||||
|
||||
_b9_deploy_update_services() {
|
||||
local VERSION=$1
|
||||
local SERVICES=$2
|
||||
local SERVER=$3
|
||||
local SETTINGS=$4
|
||||
|
||||
local TOTAL_VERSIONS_TO_KEEP=5
|
||||
|
||||
local VERSIONS_DIR="/home/ubuntu/versions"
|
||||
local TARGET_DIR=${VERSIONS_DIR}/$VERSION
|
||||
local BUILD_NAME=$(echo $VERSION | awk -F- '{printf "%s-%s-%s", $1, $2, $3}')
|
||||
|
||||
_b9_deploy_ssh $SERVER "
|
||||
for SERVICE in $(echo $SERVICES | sed 's/,/ /g'); do
|
||||
mv /home/ubuntu/\$SERVICE /home/ubuntu/versions/history/\$SERVICE-$(date +%FT%T) &>/dev/null;
|
||||
ln -s $TARGET_DIR /home/ubuntu/\$SERVICE;
|
||||
done
|
||||
~/supervisord_start_script.sh || ~/supervisord_start_script.sh -f || ~/supervisord_start_script.sh -f;
|
||||
cd /home/ubuntu/versions;
|
||||
ls -t 2>/dev/null | grep $BUILD_NAME | tail -n +$TOTAL_VERSIONS_TO_KEEP | xargs sudo rm -rf;"
|
||||
}
|
||||
|
||||
_b9_deploy_check() {
|
||||
local SERVER=$1
|
||||
local SERVICES=$2
|
||||
local SETTINGS=$3
|
||||
|
||||
echo $SERVICES | sed 's/,/\n/g' | parallel --halt 2 -j 0 $B9 exec _b9_deploy_check_one $SERVER $SETTINGS
|
||||
}
|
||||
|
||||
_b9_deploy_check_one() {
|
||||
local SERVER=$1
|
||||
local SETTINGS=$2
|
||||
local SERVICE=$3
|
||||
|
||||
local HOST
|
||||
local PORT
|
||||
local WAIT=default
|
||||
HOST=$(echo $SERVER | awk -F@ '{ print $2}')
|
||||
|
||||
if [ "$SERVICE" == "oldclient" ]; then
|
||||
SERVICE="c9"
|
||||
elif [ "$SERVICE" == "docker" ]; then
|
||||
WAIT=long
|
||||
SERVICE="docker-daemon"
|
||||
elif [[ $SERVICE =~ ^vfs-[0-9]$ ]]; then
|
||||
PORT="--port=804$(echo $SERVICE | awk -F- '{print $2}')"
|
||||
SERVICE="vfs"
|
||||
else
|
||||
SERVICE=${SERVICE//-/_}
|
||||
fi
|
||||
|
||||
if ! $C9_DIR/scripts/check-safe-deploy.sh --wait=$WAIT $PORT --server=$HOST --mode=$SETTINGS --service=$SERVICE; then
|
||||
echo "One or more safe deploy checks failed :(" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
_b9_deploy_release_event() {
|
||||
local SERVICES=$1
|
||||
local SETTINGS=$2
|
||||
local VERSION=$3
|
||||
local SERVER_PATTERN=$4
|
||||
|
||||
_b9_init_node_helper
|
||||
echo $SERVICES | sed 's/,/\n/g' | xargs -I '{}' -n1 $NODEJS $B9_DIR/lib/js/release_event.js '{}' $SETTINGS $VERSION $SERVER_PATTERN
|
||||
}
|
||||
|
||||
_b9_deploy_ssh() {
|
||||
/usr/bin/ssh \
|
||||
-o LogLevel=ERROR \
|
||||
-o StrictHostKeyChecking=no \
|
||||
-o UserKnownHostsFile=/dev/null \
|
||||
-i $(find ~/.ssh/ -name "*" | grep -Pe "./(google_compute_engine|id_rsa_ansible|id_rsa)$" | head -1)\
|
||||
"$@"
|
||||
}
|
||||
|
||||
_b9_deploy_scp() {
|
||||
/usr/bin/scp \
|
||||
-o LogLevel=ERROR \
|
||||
-o StrictHostKeyChecking=no \
|
||||
-o UserKnownHostsFile=/dev/null \
|
||||
-i $(find ~/.ssh/ -name "*" | grep -Pe "./(google_compute_engine|id_rsa_ansible|id_rsa)$" | head -1) \
|
||||
"$@"
|
||||
}
|
||||
118
b9/lib/js/filter_node_modules.js
Normal file
118
b9/lib/js/filter_node_modules.js
Normal file
@ -0,0 +1,118 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
|
||||
var DEFAULT_MODULES = ["c9", "simple-template", "architect", "amd-loader", "heapdump", "optimist"];
|
||||
var DEFAULT_SETTINGS = "deploy";
|
||||
|
||||
var npmBuild = require("architect-build/npm_build");
|
||||
var async = require("async");
|
||||
var fs = require("fs");
|
||||
var optimist = require("optimist");
|
||||
var _ = require("lodash");
|
||||
|
||||
module.exports = nodeModules;
|
||||
|
||||
if (!module.parent) {
|
||||
main(process.argv.slice(2), function(err) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
console.error("Stacktrace: ", err.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function main(argv, callback) {
|
||||
var options = optimist(argv)
|
||||
.usage("Usage: $0 [CONFIG_NAME] [--help]")
|
||||
.alias("s", "settings")
|
||||
.default("settings", DEFAULT_SETTINGS)
|
||||
.describe("settings", "Settings file to use")
|
||||
.default("source", __dirname + "/../../..")
|
||||
.describe("source", "Source directory")
|
||||
.describe("targetFile", "Target package.json")
|
||||
.boolean("help")
|
||||
.describe("help", "Show command line options.");
|
||||
|
||||
argv = options.argv;
|
||||
if (argv.help) {
|
||||
options.showHelp();
|
||||
return callback();
|
||||
}
|
||||
|
||||
if (argv._.length != 1) {
|
||||
options.showHelp();
|
||||
return callback();
|
||||
}
|
||||
var config = argv._[0];
|
||||
var settings = argv.settings;
|
||||
var source = argv.source;
|
||||
|
||||
nodeModules(source, config, settings, function(err, json) {
|
||||
if (err) return callback(err);
|
||||
|
||||
if (argv.targetFile)
|
||||
fs.writeFileSync(argv.targetFile, JSON.stringify(json, null, 2));
|
||||
else
|
||||
console.log(JSON.stringify(json, null, 2));
|
||||
});
|
||||
}
|
||||
|
||||
function calculateRequiredNodeModules(sourceDir, buildConfig, configNames, settingsName, callback) {
|
||||
if (buildConfig.nodeModulesInclude === "*") { // If the user specifically asks for all don't bother calculating
|
||||
return callback();
|
||||
}
|
||||
|
||||
async.map(configNames.split(","), calculateModules, function (err, modules) {
|
||||
if (err) return callback(err);
|
||||
return callback(null, _.uniq(_.flatten(modules))); /* Flatten array and remove duplicates */
|
||||
});
|
||||
|
||||
function calculateModules (configName, done) {
|
||||
npmBuild({
|
||||
root: sourceDir,
|
||||
args: [configName, '-s', settingsName]
|
||||
}, function (err, result) {
|
||||
if (err) return done(err);
|
||||
|
||||
var deps = result.roots;
|
||||
|
||||
var nodeModules = [];
|
||||
deps.forEach(function (dep) {
|
||||
if (dep.match(/node_modules/)) {
|
||||
nodeModules.push(dep.replace(/node_modules\//, ""));
|
||||
}
|
||||
});
|
||||
nodeModules.sort();
|
||||
return done(null, nodeModules);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Build a deploy-ready package.json for the given build config(s): the
 * repo's package.json with devDependencies/scripts stripped and
 * dependencies filtered to what the config actually needs.
 *
 * @param {string} sourceDir    repo root containing configs/ and package.json
 * @param {string} configNames  comma-separated config names
 * @param {string} settingsName settings mode passed to buildConfig
 * @param {Function} callback   called with (err, packageJson)
 */
function nodeModules(sourceDir, configNames, settingsName, callback) {
    var buildConfig = require(sourceDir + "/configs/" + configNames).buildConfig({mode: settingsName});
    var packageJson = require(sourceDir + "/package.json");

    // Renamed from the original shadowing `var nodeModules`.
    var allDeps = packageJson.dependencies;

    delete packageJson.devDependencies;
    delete packageJson.scripts;

    /* Calculates what modules are needed for this config, so they can be contact'd to nodeModulesInclude */
    calculateRequiredNodeModules(sourceDir, buildConfig, configNames, settingsName, function(err, modules) {
        if (err) return callback(err);

        // nodeModulesInclude === "*" means "keep all dependencies". The
        // module calculation is skipped in that case (modules is
        // undefined), and the old code then ran String.prototype.concat
        // on "*" producing a garbage string whose .reduce threw.
        if (buildConfig.nodeModulesInclude === "*")
            return callback(null, packageJson);

        var allModules = buildConfig.nodeModulesInclude.concat(modules).concat(DEFAULT_MODULES);

        // Keep the version pin from package.json when present; otherwise
        // accept any version ("*").
        packageJson.dependencies = allModules.reduce(function(deps, name) {
            if (allDeps[name])
                deps[name] = allDeps[name];
            else
                deps[name] = "*";

            return deps;
        }, {});

        callback(null, packageJson);
    });
}
|
||||
23
b9/lib/js/filter_node_modules_test.js
Normal file
23
b9/lib/js/filter_node_modules_test.js
Normal file
@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/*global describe it before after beforeEach afterEach */
|
||||
"use strict";
|
||||
"use server";
|
||||
|
||||
require("c9/inline-mocha")(module, null, { globals: ["define"]});
|
||||
|
||||
var assert = require("assert-diff");
|
||||
var nodeModules = require("./filter_node_modules");
|
||||
|
||||
describe(__filename, function() {
|
||||
|
||||
it("should filter node modules for docker", function(done) {
|
||||
nodeModules(__dirname + "/../../..", "docker", "deploy", function(err, config) {
|
||||
assert(!err, err);
|
||||
|
||||
assert(config.dependencies["optimist"]);
|
||||
assert(!config.dependencies["nodemailer-smtp-transport"]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
100
b9/lib/js/generate_settings.js
Normal file
100
b9/lib/js/generate_settings.js
Normal file
@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
|
||||
var DEFAULT_SETTINGS = "deploy";
|
||||
var ALWAYS_INCLUDE_SETTINGS = ["node", "mode", "manifest", "domains", "primaryDomain", "primaryBaseUrl", "baseUrlPattern"];
|
||||
|
||||
var fs = require("fs");
|
||||
var optimist = require("optimist");
|
||||
var loadManifest = require("c9/manifest").load;
|
||||
var reJSON = require("c9/json-with-re");
|
||||
|
||||
module.exports = generateSettings;
|
||||
|
||||
if (!module.parent) {
|
||||
main(process.argv.slice(2), function(err) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
console.error("Stacktrace: ", err.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function main(argv, callback) {
|
||||
var options = optimist(argv)
|
||||
.usage("Usage: $0 [CONFIG_NAME] [--help]")
|
||||
.alias("s", "settings")
|
||||
.default("settings", DEFAULT_SETTINGS)
|
||||
.describe("settings", "Settings file to use")
|
||||
.default("source", __dirname + "/../../..")
|
||||
.describe("source", "Source directory")
|
||||
.describe("targetFile", "Target package.json")
|
||||
.boolean("help")
|
||||
.describe("help", "Show command line options.");
|
||||
|
||||
argv = options.argv;
|
||||
if (argv.help) {
|
||||
options.showHelp();
|
||||
return callback();
|
||||
}
|
||||
|
||||
if (argv._.length != 1) {
|
||||
options.showHelp();
|
||||
return callback();
|
||||
}
|
||||
var config = argv._[0];
|
||||
var settingsName = argv.settings;
|
||||
var source = argv.source;
|
||||
|
||||
generateSettings(source, config, settingsName, function(err, contents) {
|
||||
if (err) return callback(err);
|
||||
|
||||
if (argv.targetFile)
|
||||
fs.writeFileSync(argv.targetFile, contents);
|
||||
else
|
||||
console.log(contents);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Generate the source text of a settings module filtered down to the
 * sections the given build config declares in settingsInclude (plus the
 * always-included keys), templated on hostname at load time.
 *
 * @param {string} source       repo root (contains settings/ and configs/)
 * @param {string} config       config name
 * @param {string} settingsName settings mode (e.g. "deploy")
 * @param {Function} callback   called with (err, contents)
 */
function generateSettings(source, config, settingsName, callback) {
    // Check if build already exists.
    var manifest = loadManifest(source);
    manifest.hostname = "[%type%]-[%provider%]-[%region%]-[%index%]-[%env%]";

    var oldSettings;
    try {
        oldSettings = require(source + "/settings/" + settingsName)(manifest);
    } catch (e) {
        return callback(e);
    }

    var buildConfig = require(source + "/configs/" + config).buildConfig({mode: settingsName});

    // Declared up front: the original assigned in one branch before the
    // `var` in the other branch, relying on hoisting and reading like an
    // implicit global.
    var newSettings;
    if (buildConfig.settingsInclude === "*") {
        newSettings = oldSettings;
    }
    else {
        buildConfig.settingsInclude = buildConfig.settingsInclude.concat(ALWAYS_INCLUDE_SETTINGS);
        newSettings = buildConfig.settingsInclude.reduce(function(settings, name) {
            settings[name] = oldSettings[name];
            return settings;
        }, {});
    }

    newSettings.node = oldSettings.node;

    // Emitted module re-templates the settings per-host at require time.
    var contents =
        "var hostname = require('c9/hostname');\n" +
        "var reJSON = require('c9/json-with-re');\n" +
        "var fill = require('simple-template').fill;\n" +
        "module.exports = function() {\n" +
        "    var options = hostname.parse(hostname.get());\n" +
        "    options.root = __dirname + '/..';\n" +
        "    var template = " + reJSON.stringify(newSettings, 2).replace(new RegExp(source, "g"), "[%root%]") + ";\n" +
        "    return reJSON.parse(fill(JSON.stringify(template), options));\n" +
        "};";

    callback(null, contents);
}
|
||||
52
b9/lib/js/generate_settings_test.js
Normal file
52
b9/lib/js/generate_settings_test.js
Normal file
@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/*global describe it before after beforeEach afterEach */
|
||||
"use strict";
|
||||
"use server";
|
||||
|
||||
require("c9/inline-mocha")(module, null, { globals: ["define"]});
|
||||
|
||||
var assert = require("assert-diff");
|
||||
var vm = require("vm");
|
||||
var generateSettings = require("./generate_settings");
|
||||
|
||||
describe(__filename, function() {
|
||||
|
||||
it("should filter settings file", function(done) {
|
||||
generateSettings(__dirname + "/../../..", "docker", "deploy", function(err, settings) {
|
||||
assert(!err, err);
|
||||
|
||||
settings = eval(settings)();
|
||||
|
||||
assert(settings.docker);
|
||||
assert(settings["docker-daemon"]);
|
||||
assert(settings.aws);
|
||||
assert(settings.sapi);
|
||||
assert(settings.rabbitmq);
|
||||
|
||||
assert(!settings.c9);
|
||||
assert(!settings.auth);
|
||||
assert(!settings.worker);
|
||||
assert(!settings.captcha);
|
||||
assert(!settings.sendgrid);
|
||||
assert(!settings.redis);
|
||||
assert(!settings["redis-slave"]);
|
||||
assert(!settings.sessionredis);
|
||||
assert(!settings["sessionredis-slave"]);
|
||||
assert(!settings.github);
|
||||
assert(!settings.bitbucket);
|
||||
assert(!settings.salesforce);
|
||||
assert(!settings.google);
|
||||
assert(!settings.c9_auth);
|
||||
assert(!settings.services);
|
||||
assert(!settings.mailer);
|
||||
assert(!settings.zuora);
|
||||
assert(!settings.pricing);
|
||||
assert(!settings.catalog);
|
||||
assert(!settings.minfraud);
|
||||
assert(!settings.support);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
69
b9/lib/js/list_plugins.js
Normal file
69
b9/lib/js/list_plugins.js
Normal file
@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
|
||||
var DEFAULT_SETTINGS = "deploy";
|
||||
|
||||
var optimist = require("optimist");
|
||||
var loadManifest = require("c9/manifest").load;
|
||||
|
||||
module.exports = listPlugins;
|
||||
|
||||
if (!module.parent) {
|
||||
main(process.argv.slice(2), function(err) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
console.error("Stacktrace: ", err.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function main(argv, callback) {
|
||||
var options = optimist(argv)
|
||||
.usage("Usage: $0 [CONFIG_NAME] [--help]")
|
||||
.alias("s", "settings")
|
||||
.default("settings", DEFAULT_SETTINGS)
|
||||
.describe("settings", "Settings file to use")
|
||||
.default("source", __dirname + "/../../..")
|
||||
.describe("source", "Source directory")
|
||||
.boolean("help")
|
||||
.describe("help", "Show command line options.");
|
||||
|
||||
argv = options.argv;
|
||||
if (argv.help) {
|
||||
options.showHelp();
|
||||
return callback();
|
||||
}
|
||||
|
||||
if (argv._.length != 1) {
|
||||
options.showHelp();
|
||||
return callback();
|
||||
}
|
||||
var config = argv._[0];
|
||||
var settingsName = argv.settings;
|
||||
var source = argv.source;
|
||||
|
||||
listPlugins(source, config, settingsName).forEach(function(line) {
|
||||
console.log(line);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * List the unique plugin directories referenced by a config: every plugin
 * whose packagePath is repo-relative ("./dir/...") contributes its
 * top-level directory name.
 *
 * @param {string} source       repo root
 * @param {string} configName   config name
 * @param {string} settingsName settings mode
 * @returns {string[]} plugin directory names
 */
function listPlugins(source, configName, settingsName) {
    var manifest = loadManifest(source);
    manifest.hostname = "[%type%]-[%provider%]-[%region%]-[%index%]-[%env%]";

    var settings = require(source + "/settings/" + settingsName)(manifest);
    var config = require(source + "/configs/" + configName)(settings, optimist([]));

    // Dedupe via an object used as a set.
    var seen = {};
    config.forEach(function(plugin) {
        var packagePath = plugin.packagePath || plugin;
        if (packagePath.indexOf("./") === 0) {
            var pluginDir = packagePath.slice(2, packagePath.indexOf("/", 2));
            seen[pluginDir] = true;
        }
    });

    return Object.keys(seen);
}
|
||||
22
b9/lib/js/list_plugins_test.js
Normal file
22
b9/lib/js/list_plugins_test.js
Normal file
@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/*global describe it before after beforeEach afterEach */
|
||||
"use strict";
|
||||
"use server";
|
||||
|
||||
require("c9/inline-mocha")(module, null, { globals: ["define"]});
|
||||
|
||||
var assert = require("assert-diff");
|
||||
var listPlugins = require("./list_plugins");
|
||||
|
||||
describe(__filename, function() {
|
||||
|
||||
it("should filter node modules for docker", function() {
|
||||
var list = listPlugins(__dirname + "/../../..", "docker", "deploy");
|
||||
|
||||
assert(list.indexOf("c9.docker") >= 0);
|
||||
assert(list.indexOf("c9.mq") >= 0);
|
||||
|
||||
assert(list.indexOf("c9.db.redis") == -1);
|
||||
});
|
||||
});
|
||||
9
b9/lib/js/package.json
Normal file
9
b9/lib/js/package.json
Normal file
@ -0,0 +1,9 @@
|
||||
{
|
||||
"name": "b9-utils",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"async": "^1.5.2",
|
||||
"lodash": "^4.2.1",
|
||||
"optimist": "^0.6.1"
|
||||
}
|
||||
}
|
||||
61
b9/lib/js/release_event.js
Normal file
61
b9/lib/js/release_event.js
Normal file
@ -0,0 +1,61 @@
|
||||
"use strict";
|
||||
|
||||
var https = require("https");
|
||||
|
||||
var DATADOG_API_KEY = '64e56d39dfdd7f2bbf06f09100d51a18';
|
||||
var DATADOG_API_URL = 'https://app.datadoghq.com';
|
||||
|
||||
module.exports = releaseEvent;
|
||||
|
||||
if (!module.parent) {
|
||||
var argv = process.argv;
|
||||
releaseEvent(argv[2], argv[3], argv[4], argv[5], function(err) {
|
||||
if (err) {
|
||||
console.error("Error posting release event to datadog" + err.message);
|
||||
process.exit(1);
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * POST an event payload to the Datadog events API.
 *
 * @param {Object} msg       event body (title, tags, ...), JSON-serialized
 * @param {Function} callback called with an Error on transport/HTTP
 *                            failure, or with no arguments on success
 */
function datadogEvent(msg, callback) {

    var payload = JSON.stringify(msg);

    var req = https.request({
        hostname: "app.datadoghq.com",
        port: 443,
        path: "/api/v1/events?api_key=" + encodeURIComponent(DATADOG_API_KEY),
        method: "POST",
        headers: {
            "Accept": "application/json",
            "Content-Type": "application/json",
            // Byte length, not string length: payload.length counts UTF-16
            // units, so multi-byte characters would truncate the body.
            "Content-Length": Buffer.byteLength(payload)
        }
    }, function(res) {
        // Drain the response body so the socket is released.
        res.resume();

        if (res.statusCode >= 400)
            return callback(new Error("request failed with status code " + res.statusCode));

        callback();
    });

    req.on("error", function(e) {
        callback(e);
    });

    req.write(payload);
    req.end();
}
|
||||
|
||||
/**
 * Emit a "release" event to Datadog for a deploy of `application` at
 * `version` targeting the server `pattern` in the given `mode`.
 *
 * @param {string} application  service name being released
 * @param {string} mode         settings mode (e.g. "deploy")
 * @param {string} version      version identifier
 * @param {string} pattern      server pattern targeted
 * @param {Function} callback   forwarded to datadogEvent
 */
function releaseEvent(application, mode, version, pattern, callback) {
    var title = 'Release: ' + application + ' version ' + version + ' to "' + pattern + '"';
    var tags = [
        'release',
        'application:' + application,
        'mode:' + mode,
        'version:' + version,
        'pattern:' + pattern
    ];

    datadogEvent({ title: title, tags: tags }, callback);
}
|
||||
197
b9/lib/package.sh
Normal file
197
b9/lib/package.sh
Normal file
@ -0,0 +1,197 @@
|
||||
readonly B9_PACKAGE_GIT_CACHE=$C9_DIR
|
||||
|
||||
b9_package_usage() {
|
||||
echo "Usage: $B9 package TREEISH [ARG...]"
|
||||
echo
|
||||
echo "Package and upload a version of Cloud 9"
|
||||
echo
|
||||
echo "Options:"
|
||||
echo " --settings=[all|beta|deploy|onlinedev] (default: all)"
|
||||
echo " --type=[newclient|docker] (default: newclient)"
|
||||
echo " --no-cache"
|
||||
exit 1
|
||||
}
|
||||
|
||||
b9_package() {
|
||||
[ "$1" == "--help" ] && b9_package_usage
|
||||
|
||||
local TREEISH=$1
|
||||
local TYPE=newclient
|
||||
local SETTINGS=all
|
||||
local STORAGE=gcs
|
||||
local USE_CACHE=1
|
||||
|
||||
[ -z "$TREEISH" ] && b9_package_usage
|
||||
shift
|
||||
|
||||
local ARG
|
||||
for ARG in "$@"; do
|
||||
case $ARG in
|
||||
--settings=*)
|
||||
SETTINGS="${ARG#*=}"
|
||||
shift
|
||||
;;
|
||||
--type=*)
|
||||
TYPE="${ARG#*=}"
|
||||
shift
|
||||
;;
|
||||
--docker)
|
||||
STORAGE=docker
|
||||
shift
|
||||
;;
|
||||
--no-cache)
|
||||
USE_CACHE=0
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
b9_package_usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
local VERSION
|
||||
local WORKDIR
|
||||
|
||||
[ "$TYPE" == "newclient" ] && SETTINGS=all
|
||||
if [ "$TYPE" == "docker" ] && [ "$SETTINGS" == "all" ]; then
|
||||
echo "You must define settings when packaging the docker daemon" 1>&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
_b9_package_init_git_cache
|
||||
|
||||
VERSION=c9-${TYPE}-${SETTINGS}-$(_b9_get_version $TREEISH)
|
||||
|
||||
if [ "$USE_CACHE" == "1" ] && _b9_package_is_cached $STORAGE $VERSION; then
|
||||
echo $VERSION
|
||||
return
|
||||
fi
|
||||
|
||||
WORKDIR=$(_d9_package_init_work_dir $VERSION)
|
||||
_d9_package_sync_workdir $TYPE $WORKDIR $VERSION $SETTINGS
|
||||
_d9_package_npm_install $WORKDIR
|
||||
_d9_package_cleanup_workdir $WORKDIR
|
||||
_d9_package_upload_${STORAGE} $WORKDIR $VERSION
|
||||
|
||||
echo $VERSION
|
||||
}
|
||||
|
||||
_b9_package_init_git_cache() {
|
||||
pushd $B9_PACKAGE_GIT_CACHE &> /dev/null
|
||||
|
||||
if [ ! -d .git ]; then
|
||||
git clone git@github.com:c9/newclient.git .
|
||||
fi
|
||||
|
||||
git fetch origin
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_d9_package_init_work_dir() {
|
||||
local VERSION=$1
|
||||
local WORK_DIR=$TMP/${VERSION}
|
||||
mkdir -p $WORK_DIR
|
||||
|
||||
echo $WORK_DIR
|
||||
}
|
||||
|
||||
_b9_get_version() {
|
||||
local TREEISH=$1
|
||||
|
||||
pushd $B9_PACKAGE_GIT_CACHE &> /dev/null
|
||||
echo $(git show $TREEISH:package.json | jq -r .version)-$(git rev-parse --short=8 $TREEISH)
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_b9_package_is_cached() {
|
||||
local STORAGE=$1
|
||||
local VERSION=$2
|
||||
|
||||
case $STORAGE in
|
||||
gcs)
|
||||
_b9_package_is_cached_gcs $VERSION
|
||||
;;
|
||||
docker)
|
||||
_b9_package_is_cached_docker $VERSION
|
||||
;;
|
||||
*)
|
||||
echo "Invalid storage type: $STORAGE"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_d9_package_upload() {
|
||||
local STORAGE=$1
|
||||
local $WORKDIR=$2
|
||||
local $VERSION=$3
|
||||
|
||||
case $STORAGE in
|
||||
gcs)
|
||||
_d9_package_upload_gcs $WORKDIR $VERSION
|
||||
;;
|
||||
docker)
|
||||
_d9_package_upload_docker $WORKDIR $VERSION
|
||||
;;
|
||||
*)
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_d9_package_sync_workdir() {
|
||||
local TYPE=$1
|
||||
local WORKDIR=$2
|
||||
local VERSION=$3
|
||||
local SETTINGS=$4
|
||||
|
||||
case $TYPE in
|
||||
newclient)
|
||||
_d9_package_sync_workdir_newclient $WORKDIR $VERSION $SETTINGS
|
||||
;;
|
||||
docker)
|
||||
_d9_package_sync_workdir_docker $WORKDIR $VERSION $SETTINGS
|
||||
;;
|
||||
*)
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_d9_package_npm_install() {
|
||||
local WORKDIR=$1
|
||||
|
||||
pushd $WORKDIR &> /dev/null
|
||||
_b9_install_deps
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_d9_package_cleanup_workdir() {
|
||||
local WORKDIR=$1
|
||||
local REVISION
|
||||
[ -z "$WORKDIR" ] && return 1
|
||||
|
||||
pushd $WORKDIR &> /dev/null
|
||||
|
||||
_d9_package_patch_package_json
|
||||
rm -rf .git build bin local
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_d9_package_patch_package_json() {
|
||||
[ ! -d .git ] && return 0
|
||||
|
||||
REVISION=$(git rev-parse HEAD)
|
||||
mv package.json _package.json
|
||||
cat _package.json | jq ".revision=\"$REVISION\"" > package.json
|
||||
rm _package.json
|
||||
}
|
||||
|
||||
_do_check_package() {
|
||||
b9_package origin/master --type=newclient --no-cache
|
||||
b9_package origin/master --type=newclient
|
||||
b9_package origin/master --type=docker --settings=deploy --no-cache
|
||||
b9_package origin/master --docker --no-cache
|
||||
}
|
||||
116
b9/lib/package_service_docker.sh
Normal file
116
b9/lib/package_service_docker.sh
Normal file
@ -0,0 +1,116 @@
|
||||
_d9_package_sync_workdir_docker() {
|
||||
local WORKDIR=$1
|
||||
local VERSION=$2
|
||||
local SETTINGS=$3
|
||||
local SOURCE=$WORKDIR/source
|
||||
|
||||
pushd $WORKDIR &> /dev/null
|
||||
|
||||
_b9_package_docker_init_source $WORKDIR $SOURCE $VERSION $SETTINGS
|
||||
_b9_package_docker_init_workdir
|
||||
|
||||
_b9_init_node_helper
|
||||
_b9_package_docker_node_modules $WORKDIR $SOURCE $SETTINGS
|
||||
_b9_package_docker_generate_settings $WORKDIR $SOURCE $SETTINGS
|
||||
_b9_package_docker_include_files $WORKDIR $SOURCE $SETTINGS
|
||||
_b9_package_docker_copy_plugins $WORKDIR $SOURCE $SETTINGS
|
||||
|
||||
rm -rf $SOURCE
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_b9_package_docker_init_source() {
|
||||
local WORKDIR=$1
|
||||
local SOURCE=$2
|
||||
local VERSION=$3
|
||||
local SETTINGS=$4
|
||||
|
||||
rm -rf $WORKDIR
|
||||
mkdir -p $SOURCE
|
||||
|
||||
_d9_package_sync_workdir_newclient $SOURCE $VERSION $SETTINGS
|
||||
_d9_package_npm_install $SOURCE
|
||||
}
|
||||
|
||||
_b9_package_docker_init_workdir() {
|
||||
mkdir -p plugins
|
||||
mkdir -p node_modules
|
||||
mkdir -p settings
|
||||
mkdir -p configs
|
||||
}
|
||||
|
||||
_b9_package_docker_node_modules() {
|
||||
local WORKDIR=$1
|
||||
local SOURCE=$2
|
||||
local SETTINGS=$3
|
||||
local NODE_MODULES
|
||||
local MODULE
|
||||
|
||||
$NODEJS $B9_DIR/lib/js/filter_node_modules.js docker --targetFile=$WORKDIR/package.json --source=$SOURCE --settings=$SETTINGS
|
||||
|
||||
NODE_MODULES=$(cat $WORKDIR/package.json | jq -r '.dependencies | keys | @sh')
|
||||
|
||||
mkdir -p $WORKDIR/node_modules
|
||||
for MODULE in $NODE_MODULES; do
|
||||
MODULE=${MODULE:1:-1}
|
||||
if [ -d $SOURCE/node_modules/$MODULE ]; then
|
||||
cp -a $SOURCE/node_modules/$MODULE $WORKDIR/node_modules
|
||||
fi
|
||||
done
|
||||
|
||||
pushd $WORKDIR &> /dev/null
|
||||
_b9_npm "$WORKDIR" install
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_b9_package_docker_generate_settings() {
|
||||
local WORKDIR=$1
|
||||
local SOURCE=$2
|
||||
local SETTINGS=$3
|
||||
|
||||
$NODEJS $B9_DIR/lib/js/generate_settings.js docker --targetFile=$WORKDIR/settings/$SETTINGS.js --source=$SOURCE --settings=$SETTINGS
|
||||
}
|
||||
|
||||
_b9_package_docker_include_files() {
|
||||
local WORKDIR=$1
|
||||
local SOURCE=$2
|
||||
local SETTINGS=$3
|
||||
|
||||
local BUILD_CONFIG
|
||||
local FILE_MODULES_INCLUDE
|
||||
local PATTERN
|
||||
|
||||
pushd $WORKDIR &> /dev/null
|
||||
|
||||
BUILD_CONFIG=$($NODEJS -e "console.log(JSON.stringify(require('$SOURCE/configs/docker').buildConfig({mode: '$SETTINGS'})))")
|
||||
FILE_INCLUDE=$(echo $BUILD_CONFIG | jq -r '.fileInclude | @sh')
|
||||
|
||||
for PATTERN in $FILE_INCLUDE; do
|
||||
PATTERN=${PATTERN:1:-1}
|
||||
mkdir -p $(dirname $PATTERN)
|
||||
cp -a -R $SOURCE/$PATTERN $(dirname $PATTERN)
|
||||
done
|
||||
|
||||
for PATTERN in "server.js" "scripts/tail-log.sh" "configs/docker.js"; do
|
||||
mkdir -p $(dirname $PATTERN)
|
||||
cp -a -R $SOURCE/$PATTERN $(dirname $PATTERN) || :
|
||||
done
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_b9_package_docker_copy_plugins() {
|
||||
local WORKDIR=$1
|
||||
local SOURCE=$2
|
||||
local SETTINGS=$3
|
||||
|
||||
local PLUGINS
|
||||
local PLUGIN
|
||||
|
||||
PLUGINS=$($NODEJS $B9_DIR/lib/js/list_plugins.js docker --source=$SOURCE --settings=$SETTINGS)
|
||||
for PLUGIN in $PLUGINS; do
|
||||
cp -a $SOURCE/plugins/$PLUGIN $WORKDIR/plugins
|
||||
done
|
||||
}
|
||||
17
b9/lib/package_service_newclient.sh
Normal file
17
b9/lib/package_service_newclient.sh
Normal file
@ -0,0 +1,17 @@
|
||||
_d9_package_sync_workdir_newclient() {
|
||||
local WORKDIR=$1
|
||||
local VERSION=$2
|
||||
local HASH
|
||||
|
||||
HASH=$(echo $VERSION | awk -F- '{print $5}')
|
||||
|
||||
rm -rf $WORKDIR
|
||||
mkdir -p $WORKDIR
|
||||
|
||||
pushd $WORKDIR &> /dev/null
|
||||
rsync -qrtv --delete $B9_PACKAGE_GIT_CACHE/.git $WORKDIR/
|
||||
git reset --hard
|
||||
git checkout $HASH
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
74
b9/lib/package_storage_docker.sh
Normal file
74
b9/lib/package_storage_docker.sh
Normal file
@ -0,0 +1,74 @@
|
||||
readonly B9_DOCKER_REGISTRY=gcr.io/c9.io/cloud9gce
|
||||
readonly B9_DOCKER_BUCKET=gs://artifacts.cloud9gce.c9.io.a.appspot.com
|
||||
|
||||
_b9_dockerize_update_base() {
|
||||
local TREEISH=origin/master
|
||||
|
||||
local CID
|
||||
local VERSION
|
||||
|
||||
# build package
|
||||
local TMPFILE=$(tempfile)
|
||||
b9_package $TREEISH --type=newclient | tee $TMPFILE
|
||||
VERSION=$(cat $TMPFILE | tail -n1)
|
||||
rm $TMPFILE
|
||||
|
||||
# build base image
|
||||
docker build --rm -t /v $TMP:/tmp $B9_DOCKER_REGISTRY/c9:base $B9_DIR/containers/c9
|
||||
|
||||
CID=$(docker run -d $B9_DOCKER_REGISTRY/c9:base sleep 1h)
|
||||
|
||||
# copy package to base
|
||||
docker exec $CID bash -c "
|
||||
cd /home/ubuntu &&
|
||||
tar xf $TMP/$VERSION.tar.xz
|
||||
rm -rf $VERSION.tgz newclient
|
||||
mv $VERSION newclient"
|
||||
|
||||
# commit image
|
||||
docker stop $CID
|
||||
docker commit $CID $B9_DOCKER_REGISTRY/c9:base
|
||||
|
||||
# push
|
||||
gcloud docker push $B9_DOCKER_REGISTRY/c9:base
|
||||
}
|
||||
|
||||
_b9_package_is_cached_docker() {
|
||||
local VERSION=$1
|
||||
local TAG
|
||||
|
||||
TAG=$(echo $VERSION | awk -F- '{printf "%s-%s", $4, $5}')
|
||||
_b9_dockerize_has_tag c9 $TAG
|
||||
}
|
||||
|
||||
_d9_package_upload_docker() {
|
||||
local WORKDIR=$1
|
||||
local VERSION=$2
|
||||
|
||||
local CID
|
||||
local TAG
|
||||
|
||||
gcloud docker pull $B9_DOCKER_REGISTRY/c9:base
|
||||
|
||||
CID=$(docker run -d -v $WORKDIR:/home/ubuntu/$(basename $WORKDIR):ro $B9_DOCKER_REGISTRY/c9:base sleep 1h)
|
||||
|
||||
# copy package
|
||||
docker exec $CID bash -c "
|
||||
cd /home/ubuntu &&
|
||||
rsync -qrt --delete --checksum /home/ubuntu/$(basename $WORKDIR)/* newclient"
|
||||
|
||||
# commit image
|
||||
TAG=$(echo $VERSION | awk -F- '{printf "%s-%s", $4, $5}')
|
||||
docker stop $CID
|
||||
docker commit $CID $B9_DOCKER_REGISTRY/c9:$TAG
|
||||
|
||||
# push
|
||||
gcloud docker push $B9_DOCKER_REGISTRY/c9:$TAG
|
||||
}
|
||||
|
||||
_b9_dockerize_has_tag() {
|
||||
local REPO=$1
|
||||
local TAG=$2
|
||||
|
||||
gsutil ls $B9_DOCKER_BUCKET/containers/repositories/library/${REPO}/tag_${TAG}
|
||||
}
|
||||
36
b9/lib/package_storage_gcs.sh
Normal file
36
b9/lib/package_storage_gcs.sh
Normal file
@ -0,0 +1,36 @@
|
||||
_b9_package_is_cached_gcs() {
|
||||
local VERSION=$1
|
||||
gsutil ls gs://cloud9_ci_cache/$(basename $VERSION).tar.xz &> /dev/null
|
||||
}
|
||||
|
||||
_d9_package_upload_gcs() {
|
||||
local WORKDIR=$1
|
||||
local VERSION=$2
|
||||
|
||||
local TMP_TAR
|
||||
local CACHE_FILE
|
||||
|
||||
CACHE_FILE=$(basename $WORKDIR)
|
||||
|
||||
pushd $WORKDIR/.. &> /dev/null
|
||||
|
||||
TMP_TAR=$(mktemp -d b9-package-XXXXXXXXXXXXX --tmpdir=$TMP)/$CACHE_FILE.tar.xz
|
||||
tar -cJf $TMP_TAR $CACHE_FILE
|
||||
gsutil cp $TMP_TAR gs://cloud9_ci_cache
|
||||
mv $TMP_TAR $TMP/$(basename $CACHE_FILE.tar.xz)
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_d9_package_download_gcs() {
|
||||
local VERSION=$1
|
||||
local CACHE_FILE=$TMP/${VERSION}.tar.xz
|
||||
|
||||
if [ -f "$CACHE_FILE" ]; then
|
||||
echo $CACHE_FILE
|
||||
return
|
||||
fi
|
||||
|
||||
gsutil cp gs://cloud9_ci_cache/$(basename $CACHE_FILE) $TMP
|
||||
echo $CACHE_FILE
|
||||
}
|
||||
80
b9/lib/prepare.sh
Normal file
80
b9/lib/prepare.sh
Normal file
@ -0,0 +1,80 @@
|
||||
readonly NPMCACHE=$TMP
|
||||
|
||||
b9_prepare_usage() {
|
||||
echo "Usage: $B9 prepare [OPTIONS]"
|
||||
echo
|
||||
echo "Prepare checkout for testing"
|
||||
echo
|
||||
echo "Options:"
|
||||
echo " --help show this help message"
|
||||
exit 1
|
||||
}
|
||||
|
||||
b9_prepare() {
|
||||
for ARG in "$@"; do
|
||||
case $ARG in
|
||||
--help|-h)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
pushd $C9_DIR &> /dev/null
|
||||
|
||||
# npm
|
||||
rm -rf node_modules
|
||||
git checkout -- node_modules
|
||||
_b9_install_deps
|
||||
git checkout -- node_modules
|
||||
|
||||
popd &> /dev/null
|
||||
}
|
||||
|
||||
_b9_install_deps() {
|
||||
if [ -f plugins/c9.profile/npm-shrinkwrap.json ]; then
|
||||
_b9_setup_node_modules npm-shrinkwrap.json
|
||||
fi
|
||||
|
||||
if [ -f plugins/c9.profile/npm-shrinkwrap.json ]; then
|
||||
_b9_setup_node_modules plugins/c9.profile/npm-shrinkwrap.json
|
||||
fi
|
||||
}
|
||||
|
||||
_b9_setup_node_modules() {
|
||||
local PACKAGE_FILE=$1
|
||||
local PACKAGE_PATH=$(dirname $PACKAGE_FILE)
|
||||
local PACKAGE_MD5=$(cat $PACKAGE_FILE | jq 'del(.version)' | md5sum | awk '{print $1}')
|
||||
local CACHE_FILE="npm-${PACKAGE_MD5}.tar.xz"
|
||||
|
||||
if [ -e "$TMP/$CACHE_FILE" ] || gsutil cp gs://cloud9_ci_cache/$CACHE_FILE $TMP &> /dev/null; then
|
||||
rm -rf $PACKAGE_PATH/node_modules
|
||||
tar -xkf $TMP/$CACHE_FILE || (
|
||||
rm $CACHE_FILE &>/dev/null
|
||||
_b9_compile_node_modules "$CACHE_FILE" "$PACKAGE_PATH"
|
||||
)
|
||||
else
|
||||
_b9_compile_node_modules "$CACHE_FILE" "$PACKAGE_PATH"
|
||||
fi
|
||||
}
|
||||
|
||||
_b9_compile_node_modules() {
|
||||
local CACHE_FILE=$1
|
||||
local PACKAGE_PATH=$2
|
||||
local NPM_CMD
|
||||
local TMP_TAR
|
||||
|
||||
if ! _b9_npm "$(pwd)/$PACKAGE_PATH" install; then
|
||||
rm -rf node_modules
|
||||
git checkout node_modules
|
||||
_b9_npm "$(pwd)/$PACKAGE_PATH" install
|
||||
fi
|
||||
|
||||
TMP_TAR=$(mktemp -d b9-npm-XXXXXXXXXXXXX --tmpdir=$TMP)/$CACHE_FILE
|
||||
tar -cJf $TMP_TAR $PACKAGE_PATH/node_modules
|
||||
|
||||
gsutil cp $TMP_TAR gs://cloud9_ci_cache
|
||||
mv $TMP_TAR $TMP/$CACHE_FILE
|
||||
}
|
||||
4
node_modules/architect/architect.js
generated
vendored
4
node_modules/architect/architect.js
generated
vendored
@ -399,15 +399,17 @@ function checkCycles(config, lookup) {
|
||||
|
||||
var pluginsList = plugins.map(function(p) { return p.packagePath; }).join("\n");
|
||||
var unresolvedList = Object.keys(unresolved);
|
||||
var resolvedList = Object.keys(resolved);
|
||||
console.warn("Could not resolve dependencies of these plugins:\n"
|
||||
+ pluginsList + "\n", plugins,
|
||||
"\nMissing services:\n" + unresolvedList.join("\n") + "\n", unresolved,
|
||||
"\nResolved services:", Object.keys(resolved));
|
||||
"\nResolved services:", resolvedList);
|
||||
var err = new Error("Could not resolve dependencies\n"
|
||||
+ (unresolvedList.length ? "Missing services: " + unresolvedList
|
||||
: "Config contains cyclic dependencies" // TODO print cycles
|
||||
));
|
||||
err.unresolved = unresolvedList;
|
||||
err.resolved = resolvedList;
|
||||
throw err;
|
||||
}
|
||||
|
||||
|
||||
10
node_modules/c9/docker-helpers.js
generated
vendored
10
node_modules/c9/docker-helpers.js
generated
vendored
@ -1,5 +1,15 @@
|
||||
|
||||
var dockerHelpers = {
|
||||
getContainerIdFromContainer: function (container) {
|
||||
var match = container.match(/^[a-f0-9]+/);
|
||||
return match && match[0];
|
||||
},
|
||||
|
||||
getContainerNameFromContainer: function (container) {
|
||||
var match = container.replace(/[^0-9a-zA-Z]$/, "").match(/[0-9a-zA-Z_-]+$/);
|
||||
return match && match[0];
|
||||
},
|
||||
|
||||
getUsernameFromContainerName: function (containerName) {
|
||||
if (containerName.split("-").length < 3) return "";
|
||||
return containerName.replace(/^container-/, "")
|
||||
|
||||
18
node_modules/c9/docker-helpers_test.js
generated
vendored
18
node_modules/c9/docker-helpers_test.js
generated
vendored
@ -7,7 +7,24 @@ var faker = require("faker");
|
||||
var dockerHelpers = require("c9/docker-helpers");
|
||||
|
||||
describe("docker-helpers", function() {
|
||||
describe("getContainerIdFromContainer", function() {
|
||||
it("should work", function() {
|
||||
assert.equal(
|
||||
dockerHelpers.getContainerIdFromContainer('3b765c5179d1 cloud9/ws-html5:2014-11-07T10-08-51Z "/mnt/shared/sbin/mic" 3 weeks ago Up 34 hours 0.0.0.0:16276->22/tcp, 0.0.0.0:47527->8080/tcp, 0.0.0.0:46944->8081/tcp, 0.0.0.0:48538->8082/tcp container-russellfeeed-html_assesment-601963-KPRaMXXRlGruDjpH'),
|
||||
'3b765c5179d1'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getContainerNameFromContainer", function() {
|
||||
it("should work", function() {
|
||||
assert.equal(
|
||||
dockerHelpers.getContainerNameFromContainer('3b765c5179d1 cloud9/ws-html5:2014-11-07T10-08-51Z "/mnt/shared/sbin/mic" 3 weeks ago Up 34 hours 0.0.0.0:16276->22/tcp, 0.0.0.0:47527->8080/tcp, 0.0.0.0:46944->8081/tcp, 0.0.0.0:48538->8082/tcp container-russellfeeed-html_assesment-601963-KPRaMXXRlGruDjpH'),
|
||||
'container-russellfeeed-html_assesment-601963-KPRaMXXRlGruDjpH'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getUsernameFromContainerName", function () {
|
||||
it("should work", function() {
|
||||
assert.equal(dockerHelpers.getUsernameFromContainerName("container-stefko-demo-project-884917"), "stefko");
|
||||
@ -35,6 +52,7 @@ describe("docker-helpers", function() {
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("artawil-etutor_11plus-wp-1422098"), "etutor_11plus-wp");
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("container-johns66139-nice-access-bot-1753521-SDcuzVdxeUNhwhpo"), "nice-access-bot");
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("johns66139-nice-access-bot-1753521-SDcuzVdxeUNhwhpo"), "nice-access-bot");
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("container-russellfeeed-html_assesment-601963-KPRaMXXRlGruDjpH"), "html_assesment");
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("d9canary"), "");
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("selenium-9213"), "");
|
||||
assert.equal(dockerHelpers.getProjectNameFromContainerName("/selenium-9213"), "");
|
||||
|
||||
6
node_modules/c9/format-user-analytics.js
generated
vendored
6
node_modules/c9/format-user-analytics.js
generated
vendored
@ -9,9 +9,11 @@ define(function(require, exports, module) {
|
||||
|
||||
function formatUser(user) {
|
||||
if (!user) return {}; // empty traits get ignored
|
||||
|
||||
|
||||
var uid = /^\d+$/.test(user.id) ? user.id : user.uid;
|
||||
|
||||
var traits = {
|
||||
uid: user.id,
|
||||
uid: uid,
|
||||
username: user.name || user.username,
|
||||
email: user.email,
|
||||
createdAt: user.date_add,
|
||||
|
||||
2
node_modules/vfs-local/localfs.js
generated
vendored
2
node_modules/vfs-local/localfs.js
generated
vendored
@ -157,6 +157,8 @@ module.exports = function setup(fsOptions) {
|
||||
|
||||
// Export the API
|
||||
var vfs = wrapDomain({
|
||||
fsOptions: fsOptions,
|
||||
|
||||
// File management
|
||||
resolve: resolve,
|
||||
stat: stat,
|
||||
|
||||
16
package.json
16
package.json
@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "c9",
|
||||
"description": "New Cloud9 Client",
|
||||
"version": "3.1.1235",
|
||||
"version": "3.1.1277",
|
||||
"author": "Ajax.org B.V. <info@ajax.org>",
|
||||
"private": true,
|
||||
"main": "bin/c9",
|
||||
@ -56,7 +56,7 @@
|
||||
"c9"
|
||||
],
|
||||
"c9plugins": {
|
||||
"c9.ide.language": "#dc02e5c4c2",
|
||||
"c9.ide.language": "#ad77e2cbd6",
|
||||
"c9.ide.language.css": "#be07d72209",
|
||||
"c9.ide.language.generic": "#a4023db7f6",
|
||||
"c9.ide.language.html": "#22fdc74869",
|
||||
@ -66,8 +66,8 @@
|
||||
"c9.ide.language.javascript.eslint": "#3127e1eba1",
|
||||
"c9.ide.language.javascript.tern": "#bf1072b11a",
|
||||
"c9.ide.language.javascript.infer": "#0561c69d67",
|
||||
"c9.ide.language.jsonalyzer": "#243e11bd42",
|
||||
"c9.ide.collab": "#4b93a497cd",
|
||||
"c9.ide.language.jsonalyzer": "#bf12af3d0a",
|
||||
"c9.ide.collab": "#1073faea73",
|
||||
"c9.ide.local": "#a6e689e33b",
|
||||
"c9.ide.find": "#e33fbaed2f",
|
||||
"c9.ide.find.infiles": "#c3bf17286d",
|
||||
@ -91,7 +91,7 @@
|
||||
"c9.ide.imgeditor": "#612e75ef4f",
|
||||
"c9.ide.immediate": "#a962119bec",
|
||||
"c9.ide.installer": "#0fde9f0067",
|
||||
"c9.ide.language.python": "#4fad6f5a80",
|
||||
"c9.ide.language.python": "#675ddb4c8f",
|
||||
"c9.ide.language.go": "#8f6d0beae7",
|
||||
"c9.ide.mount": "#86c8985f34",
|
||||
"c9.ide.navigate": "#1fbb7cd53b",
|
||||
@ -105,10 +105,10 @@
|
||||
"c9.ide.recentfiles": "#7c099abf40",
|
||||
"c9.ide.remote": "#301d2ab519",
|
||||
"c9.ide.processlist": "#2b12cd1bdd",
|
||||
"c9.ide.run": "#a25ea419b8",
|
||||
"c9.ide.run": "#1a5a660c44",
|
||||
"c9.ide.run.build": "#0598fff697",
|
||||
"c9.ide.run.debug.xdebug": "#a1b39e0ac4",
|
||||
"c9.ide.save": "#f8aaf93ea1",
|
||||
"c9.ide.run.debug.xdebug": "#9956689819",
|
||||
"c9.ide.save": "#2de9fd7c2d",
|
||||
"c9.ide.scm": "#ca3c94b84f",
|
||||
"c9.ide.terminal.monitor": "#1ccac33b0d",
|
||||
"c9.ide.test": "#a282ec1619",
|
||||
|
||||
0
plugins/c9.fs/mock/python/app/__init__.py
Normal file
0
plugins/c9.fs/mock/python/app/__init__.py
Normal file
1
plugins/c9.fs/mock/python/app/tests/__init__.py
Normal file
1
plugins/c9.fs/mock/python/app/tests/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
|
||||
4
plugins/c9.fs/mock/python/app/tests/tests.py
Normal file
4
plugins/c9.fs/mock/python/app/tests/tests.py
Normal file
@ -0,0 +1,4 @@
|
||||
from ..user.models import User
|
||||
|
||||
def test_user():
|
||||
return User()
|
||||
1
plugins/c9.fs/mock/python/app/user/__init__.py
Normal file
1
plugins/c9.fs/mock/python/app/user/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
|
||||
3
plugins/c9.fs/mock/python/app/user/models.py
Normal file
3
plugins/c9.fs/mock/python/app/user/models.py
Normal file
@ -0,0 +1,3 @@
|
||||
class User():
|
||||
def __repr__(self):
|
||||
return '<User test>'
|
||||
9
plugins/c9.fs/mock/python/test_user.py
Normal file
9
plugins/c9.fs/mock/python/test_user.py
Normal file
@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env python3
|
||||
from app.tests.tests import test_user
|
||||
from doesntexist import badimport
|
||||
|
||||
print(test_user())
|
||||
|
||||
bad_call()
|
||||
|
||||
badimport.foo()
|
||||
@ -1764,11 +1764,11 @@ define(function(require, exports, module) {
|
||||
|
||||
// Set Gutter Context Menu
|
||||
if (ui.isChildOf(gutter, target, true)) {
|
||||
mnuGutter.show(e.x, e.y);
|
||||
mnuGutter.show(e.x, e.y, "context");
|
||||
}
|
||||
// Set main Ace Context Menu
|
||||
else {
|
||||
mnuAce.show(e.x, e.y);
|
||||
mnuAce.show(e.x, e.y, "context");
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
@ -1419,12 +1419,13 @@ window.TraceKit = TraceKit;
|
||||
}
|
||||
|
||||
var blackListedErrors = {
|
||||
'Error with empty message': {},
|
||||
'Script error.': {},
|
||||
'DealPly is not defined': { factor: 10e5 },
|
||||
"Cannot read property 'style' of null": { factor: 10e3 },
|
||||
"Project with id '<id>' does not exist": { factor: 10e2 },
|
||||
"Workspace not found": { factor: 10e2 },
|
||||
count: 0,
|
||||
'#Error with empty message': {},
|
||||
'#Script error.': {},
|
||||
'#DealPly is not defined': { factor: 10e5 },
|
||||
"#Cannot read property 'style' of null": { factor: 10e3 },
|
||||
"#Project with id '<id>' does not exist": { factor: 10e2 },
|
||||
"#Workspace not found": { factor: 10e2 },
|
||||
};
|
||||
var groupedErrors = [{
|
||||
regex: /^((?:Project|User) with id ')(\d+)(' does not exist)/i,
|
||||
@ -1511,13 +1512,16 @@ window.TraceKit = TraceKit;
|
||||
}
|
||||
});
|
||||
|
||||
if (blackListedErrors.hasOwnProperty(message)) {
|
||||
var count = (blackListedErrors[message].count || 0) + 1;
|
||||
blackListedErrors[message].count = count;
|
||||
if (count % (blackListedErrors[message].factor || 10) !== 1) {
|
||||
var blackListEntry = blackListedErrors["#" + message];
|
||||
if (blackListEntry) {
|
||||
var count = (blackListEntry.count || 0) + 1;
|
||||
blackListEntry.count = count;
|
||||
if (count % (blackListEntry.factor || 10) !== 1) {
|
||||
return;
|
||||
}
|
||||
finalCustomData.$blackList = blackListedErrors[message];
|
||||
finalCustomData.$blackList = blackListEntry;
|
||||
} else if (blackListedErrors.count < 10000) {
|
||||
blackListedErrors["#" + message] = {};
|
||||
}
|
||||
|
||||
var payload = {
|
||||
|
||||
@ -52,16 +52,6 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
baseProc: baseProc
|
||||
},
|
||||
|
||||
// Mock plugins
|
||||
{
|
||||
consumes: ["apf", "ui", "Plugin"],
|
||||
provides: [
|
||||
"commands", "menus", "commands", "layout", "watcher",
|
||||
"save", "anims", "clipboard", "dialog.alert", "auth.bootstrap",
|
||||
"info", "dialog.error"
|
||||
],
|
||||
setup: expect.html.mocked
|
||||
},
|
||||
{
|
||||
consumes: ["tabManager", "proc", "terminal"],
|
||||
provides: [],
|
||||
@ -106,12 +96,12 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
var sessId;
|
||||
it('should open a pane with just an editor', function(done) {
|
||||
tabs.openEditor("terminal", function(err, tab) {
|
||||
expect(err).to.not.ok;
|
||||
expect(tabs.getTabs()).length(1);
|
||||
|
||||
var doc = tab.document;
|
||||
doc.on("setTitle", function c1(){
|
||||
expect(doc.title)
|
||||
.match(new RegExp("^bash - "));
|
||||
// expect(doc.title).match(new RegExp("^bash - "));
|
||||
|
||||
sessId = doc.getSession().id;
|
||||
|
||||
@ -129,7 +119,7 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
|
||||
session.once("connected", function(){
|
||||
doc.once("setTitle", function(){
|
||||
expect(doc.title).to.match(/^bash - /);
|
||||
// expect(doc.title).to.match(/^bash - /);
|
||||
expect(session.id).to.equal(sessId);
|
||||
done();
|
||||
});
|
||||
@ -139,7 +129,7 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
session.pty.kill();
|
||||
});
|
||||
|
||||
it('should reconnect when the session has been lost', function(done) {
|
||||
it.skip('should reconnect when the session has been lost', function(done) {
|
||||
var doc = tabs.focussedTab.document;
|
||||
var session = doc.getSession();
|
||||
|
||||
@ -169,8 +159,7 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
|
||||
var doc = tab.document;
|
||||
doc.on("setTitle", function c1(){
|
||||
expect(doc.title)
|
||||
.match(new RegExp("^bash - "));
|
||||
// expect(doc.title).match(new RegExp("^bash - "));
|
||||
|
||||
doc.off("setTitle", c1);
|
||||
done();
|
||||
@ -184,9 +173,11 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
var state, info = {};
|
||||
before(function(done) {
|
||||
tabs.getTabs()[0].activate();
|
||||
tabs.focussedTab.editor.write("ls -l\r");
|
||||
setTimeout(done, 5000);
|
||||
})
|
||||
tabs.focussedTab.editor.write("echo 123\r");
|
||||
tabs.focussedTab.document.getSession().terminal.once("afterWrite", function() {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should retrieve the state', function(done) {
|
||||
state = tabs.getState();
|
||||
@ -214,6 +205,7 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
var l = info.pages.length;
|
||||
expect(tabs.getTabs()).length(l);
|
||||
expect(tabs.getPanes()[0].getTabs()).length(l);
|
||||
tabs.getPanes()[0].focus();
|
||||
expect(tabs.focussedTab.pane.getTabs()).length(l);
|
||||
|
||||
expect(tabs.getTabs().map(function(tab) {
|
||||
@ -231,7 +223,7 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
|
||||
setTimeout(function(){
|
||||
done();
|
||||
}, 2000);
|
||||
});
|
||||
});
|
||||
if (!onload.remain) {
|
||||
it('should reconnect both terminals when doing kill-server', function(done) {
|
||||
@ -244,7 +236,7 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
|
||||
session.off("connected", c0);
|
||||
});
|
||||
})
|
||||
});
|
||||
|
||||
tabs.focussedTab.editor.write(String.fromCharCode(2) + ":kill-server\r");
|
||||
});
|
||||
@ -269,20 +261,21 @@ require(["lib/architect/architect", "lib/chai/chai", "/vfs-root"], function (arc
|
||||
var id = session.id;
|
||||
|
||||
tabs.focussedTab.unload();
|
||||
done();
|
||||
|
||||
setTimeout(function(){
|
||||
proc.execFile("tmux", {
|
||||
args: ["list-sessions"]
|
||||
}, function(err, stdout, stderr) {
|
||||
// Ignore errors for now
|
||||
if (err)
|
||||
throw err.message;
|
||||
// setTimeout(function(){
|
||||
// proc.execFile("tmux", {
|
||||
// args: ["list-sessions"]
|
||||
// }, function(err, stdout, stderr) {
|
||||
// // Ignore errors for now
|
||||
// if (err)
|
||||
// throw err.message;
|
||||
|
||||
expect(id).is.ok
|
||||
expect(stdout.indexOf(id) > -1).is.not.ok;
|
||||
done();
|
||||
});
|
||||
}, 3000);
|
||||
// expect(id).is.ok
|
||||
// expect(stdout.indexOf(id) > -1).is.not.ok;
|
||||
// done();
|
||||
// });
|
||||
// }, 3000);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@ -946,7 +946,9 @@ define(function(require, exports, module) {
|
||||
aml.removeChild(item.aml);
|
||||
}
|
||||
|
||||
function show(x, y) {
|
||||
function show(x, y, type) {
|
||||
if (type == "context")
|
||||
y++;
|
||||
lastCoords = { x : x, y : y };
|
||||
aml.display(x, y);
|
||||
}
|
||||
|
||||
@ -374,8 +374,8 @@ define(function(require, exports, module) {
|
||||
function recallVfs() {
|
||||
var vfs;
|
||||
try {
|
||||
vfs = JSON.parse(lastVfs || window.sessionStorage.getItem("vfsid"));
|
||||
if (!lastVfs) {
|
||||
vfs = JSON.parse(lastVfs || window.sessionStorage.getItem("vfsid") || null);
|
||||
if (!lastVfs && vfs) {
|
||||
window.sessionStorage.removeItem("vfsid");
|
||||
lastVfs = JSON.stringify(vfs);
|
||||
}
|
||||
|
||||
@ -248,7 +248,7 @@
|
||||
mocha.run(done);
|
||||
}
|
||||
};
|
||||
onload.remain = onload.remain == "1";
|
||||
onload.remain = options.remain == "1";
|
||||
|
||||
mocha.timeout(10000);
|
||||
|
||||
|
||||
@ -489,27 +489,51 @@ require([
|
||||
x.unregister = function(){};
|
||||
return x;
|
||||
})(),
|
||||
"immediate": (function(){
|
||||
var x = new EventEmitter();
|
||||
x.register = function(){};
|
||||
x.unregister = function(){};
|
||||
return x;
|
||||
})(),
|
||||
"c9.analytics": (function(){
|
||||
var x = new EventEmitter();
|
||||
x.register = function(){};
|
||||
x.unregister = function(){};
|
||||
return x;
|
||||
})(),
|
||||
});
|
||||
};
|
||||
|
||||
expect.setupArchitectTest = function(config, _, options) {
|
||||
if (options && options.mockPlugins) {
|
||||
config.push({
|
||||
consumes: [],
|
||||
consumes: options.existingPlugins || [],
|
||||
provides: options.mockPlugins,
|
||||
setup: expect.html.mocked
|
||||
});
|
||||
}
|
||||
architect.resolveConfig(config, function(err, config) {
|
||||
/*global describe it before after = */
|
||||
/*global describe it before after */
|
||||
if (err) throw err;
|
||||
var app = architect.createApp(config, function(err, app) {
|
||||
if (err && err.unresolved && !config.unresolved) {
|
||||
console.warn("Adding mock services for " + err.unresolved);
|
||||
config.unresolved = err.unresolved;
|
||||
return expect.setupArchitectTest(config, architect, {
|
||||
mockPlugins: config.unresolved
|
||||
expect.html.mocked({}, {}, function(a, mockServices) {
|
||||
err.missingMock = err.unresolved.filter(function(x) {
|
||||
return !mockServices[x];
|
||||
});
|
||||
config.unresolved = err.unresolved.filter(function(x) {
|
||||
return mockServices[x];
|
||||
});
|
||||
});
|
||||
if (err.missingMock.length) {
|
||||
console.error("Missing mock services for " + err.missingMock);
|
||||
} else {
|
||||
console.warn("Adding mock services for " + err.unresolved);
|
||||
return expect.setupArchitectTest(config, architect, {
|
||||
mockPlugins: config.unresolved,
|
||||
existingPlugins: err.resolved
|
||||
});
|
||||
}
|
||||
}
|
||||
if (typeof describe == "function") {
|
||||
describe('app', function() {
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user