
Commit 96855fb

Fixed the transaction-related errors
1 parent f5f4f41 commit 96855fb

7 files changed: 32 additions, 35 deletions

pgd/settings.py

Lines changed: 1 addition & 0 deletions
@@ -185,6 +185,7 @@
 }
 }

+DJANGO_SETTINGS_MODULE = 'pgd.settings'
 # PGD Specific settings
 QUERY_LIMIT = config('QUERY_LIMIT', default=50000000, cast=int)
 SEGMENT_SIZE = config('SEGMENT_SIZE', default=10, cast=int)

pgd/wsgi.py

Lines changed: 4 additions & 1 deletion
@@ -14,9 +14,12 @@

 """
 import os
+import sys
+from django.core.wsgi import get_wsgi_application

-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgd.settings")

+#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgd.settings")
+os.environ['DJANGO_SETTINGS_MODULE'] = 'pgd.settings'
 # This application object is used by any WSGI server configured to use this
 # file. This includes Django's development server, if the WSGI_APPLICATION
 # setting points here.
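For context on this change: `os.environ.setdefault` only takes effect when DJANGO_SETTINGS_MODULE is not already exported by the surrounding environment, while direct assignment always forces `pgd.settings`. A minimal sketch of the two forms side by side, using only the standard library and Django calls already present in the diff:

import os

# setdefault() keeps an already-exported value, e.g. one set by the
# web server or a shell profile, and only fills in a default otherwise.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pgd.settings")

# Direct assignment overrides whatever was exported, so the process is
# guaranteed to load pgd.settings, which is the behaviour this commit opts for.
os.environ["DJANGO_SETTINGS_MODULE"] = "pgd.settings"

from django.core.wsgi import get_wsgi_application

application = get_wsgi_application()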

pgd_search/plot/ConfDistFuncs.py

Lines changed: 6 additions & 13 deletions
@@ -312,7 +312,7 @@ def query_bins(self):
 annotations[avg] = Avg(field[1])
 annotations[stddev] = StdDev(field[1])
 annotated_query = querySet.annotate(**annotations)
-print annotated_query
+
 # sort and group by bins using an aggregate function that calculates
 # bin index based on bin size (in field units ie. degrees) and bin count.
 #
@@ -325,27 +325,20 @@ def query_bins(self):
 # XXX in Django 1.2+ aggregates were changed to require connection and
 # SQLCompiler objects to generate sql. We must initialize this all
 # manually to be able to grab the SQL for just our aggregate.
-print "xTextString is :"
-print self.xTextString
-print "yTextString is "
-print self.yTextString
-print querySet
+
 sortx = BinSort(self.xTextString, offset=x, bincount=xbin, max=x1)
 sorty = BinSort(self.yTextString, offset=y, bincount=ybin, max=y1)

-pgdaggr = PGDAggregate(self.xTextString, offset=x, bincount=xbin, max=x1)
-print "Checking Attribute for PGDAggregate"
-print hasattr(pgdaggr, 'aggr')
 annotated_query.annotate(x=sortx, y=sorty)

 cn = connections['default']
 qn = SQLCompiler(annotated_query.query, cn, 'default').quote_name_unless_alias
-if not hasattr(sortx, 'queryObj'):
-    print "doesn't have queryObj"
-else :
-    print "has queryObj"
+
+#Hack : Using the BinSortSQL class directly instead of BinSort
+#this overrides l:329 and l:330
 sortx = BinSortSQL(('pgd_core_residue', 'psi'), offset=x, bincount=xbin, max=x1)
 sorty = BinSortSQL(('pgd_core_residue', 'phi'), offset=y, bincount=ybin, max=y1)
+
 sortx_sql = sortx.as_sql(qn, cn)[0]
 sorty_sql = sorty.as_sql(qn, cn)[0]

pgd_search/plot/PlotForm.py

Lines changed: 4 additions & 2 deletions
@@ -168,7 +168,9 @@ def clean(self):
 if not data['background_color'] :
     data['background_color'] = 'Transparent'
 if not data['graph_color'] :
-    data['graph_color'] = 'Transparent'
+    data['graph_color'] = 'Dark Gray'
 if not data['hash_color'] :
-    data['hash_color'] = 'Transparent'
+    data['hash_color'] = 'Gray'
+if not data['text_color'] :
+    data['text_color'] = 'Black'
 return data
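The change follows the usual Django `Form.clean()` fallback pattern: blank optional fields get a default before the cleaned data is returned. A minimal, self-contained sketch of that pattern, assuming the color fields are plain optional CharFields; the form class name below is illustrative, not the project's actual PlotForm:

from django import forms

class ColorDefaultsForm(forms.Form):
    # Stand-in fields mirroring the names in the diff.
    background_color = forms.CharField(required=False)
    graph_color = forms.CharField(required=False)
    hash_color = forms.CharField(required=False)
    text_color = forms.CharField(required=False)

    def clean(self):
        data = super(ColorDefaultsForm, self).clean()
        # Empty strings are falsy, so missing colors fall back to defaults.
        if not data.get('background_color'):
            data['background_color'] = 'Transparent'
        if not data.get('graph_color'):
            data['graph_color'] = 'Dark Gray'
        if not data.get('hash_color'):
            data['hash_color'] = 'Gray'
        if not data.get('text_color'):
            data['text_color'] = 'Black'
        return data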

pgd_search/plot/views.py

Lines changed: 0 additions & 1 deletion
@@ -194,7 +194,6 @@ def renderToSVG(request):
 return HttpResponse(_json)

 else:
-    print form.errors
 """
 Errors in the form - repackage the error list as a list of errors
 This list can then be json serialized and processed by the javascript

pgd_search/statistics/aggregates.py

Lines changed: 0 additions & 9 deletions
@@ -29,15 +29,6 @@ def add_to_query(self, query, alias, col, source, is_summary):
 klass = globals()['%sSQL' % self.name]
 aggregate = klass(col, source=source, is_summary=is_summary, **self.extra)

-print self.extra
-
-print "alias is :"
-print alias
-print "col is :"
-print col
-print "source is : "
-print source
-
 # Validate that the backend has a fully supported, correct
 # implementation of this aggregate
 self.aggr = aggregate

pgd_splicer/ProcessPDBTask.py

Lines changed: 17 additions & 9 deletions
@@ -32,6 +32,7 @@
 from Bio.PDB import calc_angle as pdb_calc_angle
 from Bio.PDB import calc_dihedral as pdb_calc_dihedral
 from django.db import transaction
+import django

 from pgd_core.models import (Protein as ProteinModel, Chain as ChainModel,
                              Residue as ResidueModel, Sidechain_ARG,
@@ -45,6 +46,7 @@
 from pgd_splicer.chi import CHI_MAP, CHI_CORRECTIONS_TESTS, CHI_CORRECTIONS
 from pgd_splicer.sidechain import bond_angles, bond_lengths

+django.setup()

 def NO_VALUE(field):
     """
@@ -195,12 +197,12 @@ def workhorse(data):
     return False


-@transaction.commit_manually
+
 def process_pdb(data):
     """
     Process an individual pdb file
     """
-
+    transaction.set_autocommit(False)
     # create a copy of the data. This dict will have a large amount of data
     # added to it as the protein is processed. This prevents memory leaks
     # due to the original dict having a reference held outside this method.
@@ -233,8 +235,10 @@ def process_pdb(data):
     protein.rfactor = float(data['rfactor'])
     protein.rfree = float(data['rfree'])
     protein.pdb_date = data['pdb_date']
-    protein.save()
+    with transaction.atomic():
+        protein.save()

+    transaction.commit()
     # 3) Get/Create Chains and save values
     chains = {}
     for chaincode, residues in data['chains'].items():
@@ -248,9 +252,10 @@ def process_pdb(data):
         chain.id = chainId
         chain.protein = protein
         chain.code = chaincode
-        chain.save()
+        with transaction.atomic():
+            chain.save()

-        protein.chains.add(chain)
+            protein.chains.add(chain)
         #create dictionary of chains for quick access
         chains[chaincode] = chain

@@ -288,17 +293,20 @@ def process_pdb(data):
         except:
             sidechain = klass()
         sidechain.__dict__.update(residue_props['sidechain'])
-        sidechain.save()
+        with transaction.atomic():
+            sidechain.save()
         residue.__setattr__(name, sidechain)

         # 4e) save
-        residue.save()
-        chain.residues.add(residue)
+        with transaction.atomic():
+            residue.save()
+            chain.residues.add(residue)

         # 4f) Update old_residue.next
         if "prev" in residue_props:
             old_residue.next = residue
-            old_residue.save()
+            with transaction.atomic():
+                old_residue.save()


         old_residue = residue
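This file is the heart of the commit: the `@transaction.commit_manually` decorator was deprecated in Django 1.6 and removed in 1.8, so `process_pdb` now disables autocommit with `transaction.set_autocommit(False)`, wraps each save in `transaction.atomic()` (which acts as a savepoint inside the manually managed transaction), and commits explicitly. The added `django.setup()` call populates the app registry, which a standalone script needs before using the ORM. Below is a minimal sketch of the same transaction pattern in isolation, with a placeholder model class and record loop rather than the actual splicer code:

from django.db import transaction

def import_records(records, model_cls):
    """Placeholder illustrating the transaction handling used above."""
    # Turn off autocommit for the duration of the import, as process_pdb()
    # now does, so nothing is persisted until the explicit commit().
    transaction.set_autocommit(False)
    try:
        for fields in records:
            obj = model_cls(**fields)
            # Inside a manually managed transaction, atomic() wraps the
            # save in a savepoint that is released on success.
            with transaction.atomic():
                obj.save()
        transaction.commit()
    except Exception:
        transaction.rollback()
        raise
    finally:
        # Restore the default autocommit behaviour for this connection.
        transaction.set_autocommit(True)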
