forked from La0/runreport
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathfabfile.py
More file actions
executable file
·138 lines (116 loc) · 3.3 KB
/
fabfile.py
File metadata and controls
executable file
·138 lines (116 loc) · 3.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
from fabric.api import *
from coach.settings import FABRIC_HOSTS, DATABASES  # Mandatory settings

# Optional settings: provide safe defaults for ALL of them when the import
# fails. The original only defaulted FABRIC_SUPERVISORS, so a missing setting
# left FABRIC_ENV / FABRIC_BASE undefined and later tasks crashed with a
# NameError. ImportError is the only failure we expect here.
try:
    from coach.settings import FABRIC_ENV, FABRIC_BASE, FABRIC_SUPERVISORS
except ImportError:
    FABRIC_ENV = 'django'  # matches virtualenv()'s default env name
    FABRIC_BASE = '.'      # conservative default: deploy from the cwd
    FABRIC_SUPERVISORS = []

import os
import shutil
from time import time
from datetime import date

# Remote hosts every fabric task runs against
env.hosts = FABRIC_HOSTS
def prod():
    '''
    Deploy to production: stop the supervised services, update the code,
    python environment and database on the remote host, then restart.
    '''
    # Stop services
    supervisors('stop')
    # Brutally kill celery workers as supervisor
    # doesn't do its job :(
    if 'runreport_celery' in FABRIC_SUPERVISORS:
        # warn_only: kill may find no worker to reap; that's fine
        with settings(warn_only=True):
            run("ps auxww | grep 'celery -A coach worker' | grep -v grep | awk '{print $2}' |xargs kill -9")
    with cd(FABRIC_BASE):
        pull()
        with virtualenv(FABRIC_ENV):
            update_requirements()
            submodules()
            migrate_db()
    # Start again
    supervisors('start')
def syncdb(update=False):
# Import dump from server
local_dump = 'prod.json'
if update:
print 'Try to update Database dump'
prod_dump = '/tmp/runreport.json'
apps = ('sport', 'users', 'club', 'page', 'messages', 'tracks', 'friends')
with cd(FABRIC_BASE):
with virtualenv(FABRIC_ENV):
run('./manage.py dumpdata --indent=4 -e sessions %s > %s' % (' '.join(apps), prod_dump))
get(prod_dump, local_dump)
else:
print 'Use today dump on server'
prod_dump = '~/db/%s.json' % date.today().strftime('%Y%m%d')
get(prod_dump, local_dump)
# Re create db & load dump
createdb(False) # no fixtures here
local('./manage.py loaddata %s' % local_dump)
os.remove(local_dump)
def createdb(use_fixtures=True):
    '''
    (Re)create the local PostgreSQL database:
    * drop the previous database when it exists
    * create a fresh one owned by the configured user
    * enable PostGIS, apply migrations, optionally load base fixtures
    '''
    settings_db = DATABASES['default']
    # Drop & recreate through the 'postgres' maintenance database
    psql('drop database if exists %s' % settings_db['NAME'], 'postgres')
    psql('create database %(NAME)s with owner = %(USER)s' % settings_db, 'postgres')
    # Enable PostGIS on the freshly created database
    psql('create extension postgis')
    # Build the schema
    local('./manage.py migrate')
    if not use_fixtures:
        return
    # Seed the base data
    for fixture in ('sport/data/sports.json',
                    'users/data/categories.json',
                    'users/data/demo.json',
                    'club/data/demo.json'):
        local('./manage.py loaddata %s' % fixture)
def psql(sql, dbname=None):
    '''
    Run a SQL statement through the psql command line client.

    sql: the statement to execute.
    dbname: target database name; defaults to the configured NAME
            (pass 'postgres' for create/drop database operations).
    Raises Exception when the configured engine is not PostGIS.
    '''
    db = DATABASES['default']
    # NOTE(review): removed the debug `print db` that was here — it dumped
    # the full settings dict, PASSWORD included, to stdout/logs on every call.
    suffix = db['ENGINE'][db['ENGINE'].rindex('.') + 1:]
    if suffix not in ('postgis',):
        raise Exception('Only PostGis is supported')
    # PGPASSWORD avoids an interactive prompt; beware it stays visible in
    # the local process list for the duration of the command.
    cmd = 'PGPASSWORD="%(PASSWORD)s" psql --username=%(USER)s --host=%(HOST)s' % db
    cmd += ' --dbname=%s' % (dbname or db['NAME'])
    cmd += ' --command="%s"' % sql
    local(cmd)
def virtualenv(name='django'):
    '''
    Context manager that prefixes remote commands so they run
    inside the named virtualenv.
    '''
    activate = 'source %s/bin/activate' % name
    return prefix(activate)
def update_requirements():
    '''
    Install/upgrade the python dependencies with pip
    on the remote host.
    '''
    run('pip install -r requirements.txt')
def pull():
    '''
    Fetch the latest code from github on the remote host.
    '''
    run('git pull')
def migrate_db():
    '''
    Apply pending database migrations on the remote host.
    '''
    run('./manage.py migrate')
def supervisors(cmd):
    '''
    Apply a supervisorctl command to every configured process.
    '''
    for process in FABRIC_SUPERVISORS:
        supervisor(cmd, process)
def supervisor(cmd, process):
    '''
    Drive a single remote process through supervisorctl
    (e.g. cmd='start'/'stop').
    '''
    run('supervisorctl %s %s' % (cmd, process))
def submodules():
    '''
    Make sure the git submodules are registered and up to date
    on the remote host.
    '''
    run('git submodule init')
    run('git submodule update')