# -*- coding: utf-8 -*-
import base64
import pickle
import random

from collections import deque
from copy import copy
try:
    from itertools import combinations
except ImportError:

    def combinations(items, n):
        if n == 0:
            yield []
        else:
            for i in xrange(len(items)):
                for cc in combinations(items[i + 1:], n - 1):
                    yield [items[i]] + cc
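
    # Illustrative example with hypothetical input: unlike
    # itertools.combinations, this fallback yields lists, e.g.
    #   list(combinations(["a", "b", "c"], 2)) -> [['a', 'b'], ['a', 'c'], ['b', 'c']]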
19 | ||
20 | from django.db.models import get_models | |
21 | from django.db.models.fields.related import (ForeignKey, OneToOneField, | |
22 | ManyToManyField) | |
23 | from django.core.exceptions import SuspiciousOperation | |
24 | from django.conf import settings | |
25 | from django.utils.hashcompat import md5_constructor | |
26 | from django.utils.importlib import import_module | |
27 | from django.utils.simplejson import dumps | |
28 | ||
29 | try: | |
30 | from django.db.models.fields.generic import GenericRelation | |
31 | except ImportError: | |
32 | from django.contrib.contenttypes.generic import GenericRelation | |
33 | ||
34 | try: | |
35 | qbe_formats = getattr(settings, "QBE_FORMATS_EXPORT", "qbe_formats") | |
36 | formats = import_module(qbe_formats).formats | |
37 | except ImportError: | |
38 | from django_qbe.exports import formats | |
39 | formats # Makes pyflakes happy | |
40 | ||
41 | ||
42 | def qbe_models(admin_site=None, only_admin_models=False, json=False): | |
43 | app_models = get_models(include_auto_created=True, include_deferred=True) | |
44 | app_models_with_no_includes = get_models(include_auto_created=False, | |
45 | include_deferred=False) | |
46 | if admin_site: | |
47 | admin_models = [m for m, a in admin_site._registry.items()] | |
48 | else: | |
49 | admin_models = [] | |
50 | if only_admin_models: | |
51 | app_models = admin_models | |
52 | graphs = {} | |
53 | ||
54 | def get_field_attributes(field): | |
55 | return {field.name: { | |
56 | 'name': field.name, | |
57 | 'type': type(field).__name__, | |
58 | 'blank': field.blank, | |
59 | 'label': "%s" % field.verbose_name.lower().capitalize(), | |
60 | 'primary': field.primary_key, | |
61 | }} | |
62 | ||
63 | def get_through_fields(field): | |
64 | # Deprecated | |
65 | through_fields = [] | |
66 | for through_field in field.rel.through._meta.fields: | |
67 | label = through_field.verbose_name.lower().capitalize() | |
68 | through_fields_dic = { | |
69 | 'name': through_field.name, | |
70 | 'type': type(through_field).__name__, | |
71 | 'blank': through_field.blank, | |
72 | 'label': u"%s" % label, | |
73 | } | |
74 | if hasattr(through_field.rel, "to"): | |
75 | through_rel = through_field.rel | |
76 | through_mod = through_rel.to.__module__.split(".")[-2] | |
77 | through_name = through_mod.lower().capitalize() | |
78 | through_target = { | |
79 | 'name': through_name, | |
80 | 'model': through_rel.to.__name__, | |
81 | 'field': through_rel.get_related_field().name, | |
82 | } | |
83 | through_fields_dic.update({ | |
84 | "target": through_target, | |
85 | }) | |
86 | through_fields.append(through_fields_dic) | |
87 | return through_fields | |
88 | ||
89 | def get_target(field): | |
90 | name = field.rel.to.__module__.split(".")[-2].lower().capitalize() | |
        try:
            t_field = field.rel.field_name
        except AttributeError:
            t_field = field.rel.to._meta.pk.name
        target = {
            'name': name,
            'model': field.rel.to.__name__,
            'field': t_field,
        }
        if hasattr(field.rel, 'through') and field.rel.through is not None:
            name = field.rel.through.__module__.split(".")[-2]
            target.update({
                'through': {
                    'name': name.lower().capitalize(),
                    'model': field.rel.through.__name__,
                    'field': field.rel.through._meta.pk.name,
                }
            })
        return target

    def get_target_relation(field, extras=""):
        target = get_target(field)
        relation = {
            'target': target,
            'type': type(field).__name__,
            'source': field.name,
            'arrows': extras,
        }
        return target, relation

    def add_relation(model, field, extras=""):
        target, relation = get_target_relation(field, extras=extras)
        if relation not in model['relations']:
            model['relations'].append(relation)
        model['fields'][field.name].update({'target': target})
        return model

    for app_model in app_models:
        model = {
            'name': app_model.__name__,
            'fields': {},
            'relations': [],
            'primary': app_model._meta.pk.name,
            'collapse': ((app_model not in admin_models) or
                         (app_model not in app_models_with_no_includes)),
            'is_auto': app_model not in app_models_with_no_includes,
        }

        for field in app_model._meta.fields:
            field_attributes = get_field_attributes(field)
            model['fields'].update(field_attributes)
            if isinstance(field, ForeignKey):
                model = add_relation(model, field)
            elif isinstance(field, OneToOneField):
                extras = ""  # "[arrowhead=none arrowtail=none]"
                model = add_relation(model, field, extras=extras)

        if app_model._meta.many_to_many:
            for field in app_model._meta.many_to_many:
                field_attributes = get_field_attributes(field)
                model['fields'].update(field_attributes)
                if isinstance(field, ManyToManyField):
                    extras = ""  # "[arrowhead=normal arrowtail=normal]"
                    model = add_relation(model, field, extras=extras)
                elif isinstance(field, GenericRelation):
                    extras = ""  # '[style="dotted"]
                    # [arrowhead=normal arrowtail=normal]'
                    model = add_relation(model, field, extras=extras)

        model['fields'] = sorted(model['fields'].items())

        app_title = app_model._meta.app_label.title().lower().capitalize()
        if app_title not in graphs:
            graphs[app_title] = {}
        graphs[app_title].update({app_model.__name__: model})

    if json:
        return dumps(graphs)
    else:
        return graphs
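
# Illustrative shape of the structure returned by qbe_models(), using a
# hypothetical "Library" app with a "Book" model:
#   {"Library": {"Book": {"name": "Book", "primary": "id",
#                         "fields": [...], "relations": [...],
#                         "collapse": False, "is_auto": False}}}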
171 | ||
172 | ||
173 | def qbe_graph(admin_site=None, directed=False): | |
174 | models = qbe_models(admin_site) | |
175 | graph = {} | |
176 | for k, v in models.items(): | |
177 | for l, w in v.items(): | |
178 | key = "%s.%s" % (k, l) | |
179 | if key not in graph: | |
180 | graph[key] = [] | |
181 | for relation in w['relations']: | |
182 | source = relation['source'] | |
183 | target = relation['target'] | |
184 | if "through" in target: | |
185 | through = target["through"] | |
186 | model = "%s.%s" % (through['name'], through['model']) | |
187 | value = (source, model, through['field']) | |
188 | else: | |
189 | model = "%s.%s" % (target['name'], target['model']) | |
190 | value = (source, model, target['field']) | |
191 | if value not in graph[key]: | |
192 | graph[key].append(value) | |
193 | if not directed: | |
194 | if model not in graph: | |
195 | graph[model] = [] | |
196 | target_field = target['field'] | |
197 | target_value = (target_field, key, source) | |
198 | if target_value not in graph[model]: | |
199 | graph[model].append(target_value) | |
200 | if not graph[key]: | |
201 | del graph[key] | |
202 | return graph | |
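
# Illustrative shape of the adjacency mapping returned by qbe_graph(), using
# hypothetical "Library.Book" and "Library.Author" models:
#   {"Library.Book": [("author", "Library.Author", "id")],
#    "Library.Author": [("id", "Library.Book", "author")]}
# Each triple is (source_field, "App.Model", target_field); the reverse
# edges are only added when directed=False.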
203 | ||
204 | ||
205 | def qbe_tree(graph, nodes, root=None): | |
206 | """ | |
207 | Given a graph, nodes to explore and an optinal root, do a breadth-first | |
208 | search in order to return the tree. | |
209 | """ | |
210 | if root: | |
211 | start = root | |
212 | else: | |
213 | index = random.randint(0, len(nodes) - 1) | |
214 | start = nodes[index] | |
215 | # A queue to BFS instead DFS | |
216 | to_visit = deque() | |
217 | cnodes = copy(nodes) | |
218 | visited = set() | |
219 | # Format is (parent, parent_edge, neighbor, neighbor_field) | |
220 | to_visit.append((None, None, start, None)) | |
221 | tree = {} | |
222 | while len(to_visit) != 0 and nodes: | |
223 | parent, parent_edge, v, v_edge = to_visit.pop() | |
        # Prune
        if v in nodes:
            nodes.remove(v)
        node = graph[v]
        if v not in visited and len(node) > 1:
            visited.add(v)
            # Preorder process
            if all((parent, parent_edge, v, v_edge)):
                if parent not in tree:
                    tree[parent] = []
                if (parent_edge, v, v_edge) not in tree[parent]:
                    tree[parent].append((parent_edge, v, v_edge))
                if v not in tree:
                    tree[v] = []
                if (v_edge, parent, parent_edge) not in tree[v]:
                    tree[v].append((v_edge, parent, parent_edge))
            # Iteration
            for node_edge, neighbor, neighbor_edge in node:
                value = (v, node_edge, neighbor, neighbor_edge)
                to_visit.append(value)
    remove_leafs(tree, cnodes)
    return tree, (len(nodes) == 0)
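
# qbe_tree() returns (tree, covered), where covered is True only when every
# node passed in `nodes` was reached from the chosen root.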
246 | ||
247 | ||
248 | def remove_leafs(tree, nodes): | |
249 | ||
250 | def get_leafs(tree, nodes): | |
251 | return [node for node, edges in tree.items() | |
252 | if len(edges) < 2 and node not in nodes] | |
253 | ||
254 | def delete_edge_leafs(tree, leaf): | |
255 | for node, edges in tree.items(): | |
256 | for node_edge, neighbor, neighbor_edge in edges: | |
257 | if leaf == neighbor: | |
258 | edge = (node_edge, neighbor, neighbor_edge) | |
259 | tree[node].remove(edge) | |
260 | del tree[leaf] | |
261 | ||
262 | leafs = get_leafs(tree, nodes) | |
263 | iterations = 0 | |
    # Keep pruning until no removable leafs remain, with an iteration cap as
    # a safety net
    while leafs and iterations < len(tree) ** 2:
        for node in leafs:
            if node in tree:
                delete_edge_leafs(tree, node)
        leafs = get_leafs(tree, nodes)
        iterations += 1
    return tree
271 | ||
272 | ||
273 | def qbe_forest(graph, nodes): | |
274 | forest = [] | |
275 | for node, edges in graph.items(): | |
276 | tree, are_all = qbe_tree(graph, copy(nodes), root=node) | |
277 | if are_all and tree not in forest: | |
278 | forest.append(tree) | |
279 | return sorted(forest, cmp=lambda x, y: cmp(len(x), len(y))) | |
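
# qbe_forest() collects the distinct trees (one attempt per possible root)
# that cover every requested node, smallest first.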
280 | ||
281 | ||
282 | def find_all_paths(graph, start_node, end_node, path=None): | |
283 | if not path: | |
284 | path = [] | |
285 | path = path + [start_node] | |
286 | if start_node == end_node: | |
287 | return [path] | |
288 | if start_node not in graph: | |
289 | return [] | |
290 | paths = [] | |
291 | for source_edge, target_node, target_edge in graph[start_node]: | |
292 | if target_node not in path: | |
293 | newpaths = find_all_paths(graph, target_node, end_node, path) | |
294 | for newpath in newpaths: | |
295 | paths.append(newpath) | |
296 | return paths | |
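
# Illustrative example with a hypothetical two-model graph:
#   graph = {"App.A": [("fk", "App.B", "id")], "App.B": [("id", "App.A", "fk")]}
#   find_all_paths(graph, "App.A", "App.B")  # -> [["App.A", "App.B"]]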
297 | ||
298 | ||
299 | def find_minimal_paths(graph, start_node, end_node): | |
300 | ||
301 | def find_all_paths(graph, start_node, end_node, start_edge, end_edge, | |
302 | path=None, minimun=float("Inf")): | |
303 | if not path: | |
304 | path = [] | |
305 | path = path + [start_node] | |
306 | if start_node == end_node: | |
307 | return [path], minimun | |
308 | if start_node not in graph: | |
309 | return [], minimun | |
310 | paths = [] | |
311 | if len(path) < minimun: | |
312 | for source_edge, target_node, target_edge in graph[start_node]: | |
313 | if target_node not in path: | |
314 | newpaths, minimun = find_all_paths(graph, target_node, | |
315 | end_node, | |
316 | target_edge, | |
317 | source_edge, | |
318 | path, minimun) | |
319 | for newpath in newpaths: | |
320 | newpath_length = len(newpath) | |
321 | if minimun > newpath_length: | |
322 | minimun = newpath_length | |
323 | if newpath not in paths: | |
324 | paths.append(newpath) | |
325 | return paths, minimun | |
326 | ||
327 | paths, minimun = find_all_paths(graph, start_node, end_node, | |
328 | start_edge=None, end_edge=None, | |
329 | path=None, minimun=float("Inf")) | |
330 | return paths | |
331 | ||
332 | ||
333 | def _combine(items, val=None, paths=None, length=None): | |
334 | if not paths: | |
335 | paths = [] | |
336 | if not length: | |
337 | length = len(items) | |
338 | if not val: | |
339 | val = [] | |
340 | if len(val) == length - 1 and len(items) == 1: | |
341 | return [(val + [i]) for i in items[0]] | |
342 | for i, item in enumerate(items[:-1]): | |
343 | for value in item: | |
344 | val.append(value) | |
345 | path = _combine(items[i + 1:], val, paths, length) | |
346 | val.pop() | |
347 | ||
348 | def visited_path(x): | |
349 | return x not in paths | |
350 | path = filter(visited_path, path) | |
351 | paths.extend(path) | |
352 | return paths | |
353 | ||
354 | ||
355 | def combine(items, k=None): | |
356 | """ | |
357 | Create a matrix in wich each row is a tuple containing one of solutions or | |
358 | solution k-esima. | |
359 | """ | |
    length_items = len(items)
    lengths = [len(i) for i in items]
    length = reduce(lambda x, y: x * y, lengths)
    repeats = [reduce(lambda x, y: x * y, lengths[i:])
               for i in range(1, length_items)] + [1]
    if k is not None:
        k = k % length
        # Python division by default is integer division (~ floor(a/b))
        indices = [(k % (lengths[i] * repeats[i])) / repeats[i]
                   for i in range(length_items)]
        return [items[i][indices[i]] for i in range(length_items)]
    else:
        matrix = []
        for i, item in enumerate(items):
            row = []
            for subset in item:
                row.extend([subset] * repeats[i])
            times = length / len(row)
            matrix.append(row * times)
        # Transpose the matrix, i.e. return columns instead of rows
        return zip(*matrix)
381 | ||
382 | ||
383 | def graphs_join(graphs): | |
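    # Note: this is currently a stub; it only reports how many subgraphs it
    # was asked to combine and returns an empty list.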
    print "Combine %s elements" % len(graphs)
    return []
386 | ||
387 | ||
388 | def autocomplete_graph(admin_site, current_models, directed=False): | |
389 | graph = qbe_graph(admin_site, directed=directed) | |
390 | valid_paths = [] | |
391 | for c, d in combinations(current_models, 2): | |
392 | paths = find_minimal_paths(graph, c, d) | |
393 | combined_sets = combine(paths) | |
394 | for combined_set in combined_sets: | |
395 | path = graphs_join(combined_set) | |
396 | valid_paths.append(path) | |
397 | # for path in paths: | |
398 | # if all(map(lambda x: x in path, current_models)): | |
399 | # if path not in valid_paths: | |
400 | # valid_paths.append(path) | |
401 | return sorted(valid_paths, cmp=lambda x, y: cmp(len(x), len(y))) | |
402 | ||
403 | ||
404 | # Taken from django.contrib.sessions.backends.base | |
405 | def pickle_encode(session_dict): | |
406 | "Returns the given session dictionary pickled and encoded as a string." | |
407 | pickled = pickle.dumps(session_dict, pickle.HIGHEST_PROTOCOL) | |
408 | pickled_md5 = md5_constructor(pickled + settings.SECRET_KEY).hexdigest() | |
409 | return base64.encodestring(pickled + pickled_md5) | |
410 | ||
411 | ||
412 | # Adapted from django.contrib.sessions.backends.base | |
413 | def pickle_decode(session_data): | |
414 | # The '+' character is translated to ' ' in request | |
415 | session_data = session_data.replace(" ", "+") | |
416 | # The length of the encoded string should be a multiple of 4 | |
417 | while (((len(session_data) / 4.0) - (len(session_data) / 4)) != 0): | |
418 | session_data += u"=" | |
419 | encoded_data = base64.decodestring(session_data) | |
420 | pickled, tamper_check = encoded_data[:-32], encoded_data[-32:] | |
421 | pickled_md5 = md5_constructor(pickled + settings.SECRET_KEY).hexdigest() | |
422 | if pickled_md5 != tamper_check: | |
423 | raise SuspiciousOperation("User tampered with session cookie.") | |
424 | try: | |
425 | return pickle.loads(pickled) | |
426 | # Unpickling can cause a variety of exceptions. If something happens, | |
427 | # just return an empty dictionary (an empty session). | |
428 | except: | |
429 | return {} |
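
# Illustrative round trip with a hypothetical dictionary (requires a
# configured settings.SECRET_KEY):
#   data = pickle_encode({"foo": 1})
#   pickle_decode(data)  # -> {"foo": 1}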