| | 1 | {{{ |
| | 2 | def import_excel_sheet(table, filename, num_columns, sheet_index=0, header_row=1, truncate_table=True, cut_columns=(), append_column_names={}): |
| | 3 | """ |
| | 4 | generic function for getting a table of data from an excel spreadsheet |
| | 5 | header_row - 1-based row index, as in excel |
| | 6 | """ |
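| | | #NB this is a web2py controller function - db, T and session come from the |
| | | #framework's execution environment rather than explicit imports |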
| | 7 | import datetime |
| | 8 | |
| | 9 | try: |
| | 10 | import xlrd |
| | 11 | except ImportError: |
| | 12 | session.error = T('xlrd module not available within the running Python - this needs installing to do XLS Reporting!') |
| | | return |
| | 13 | |
| | 14 | #Import Workbooks |
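| | | #formatting_info=True is needed so xlrd exposes sheet.merged_cells for .xls files |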
| | 15 | workbook = xlrd.open_workbook(filename, formatting_info=True) |
| | 16 | |
| | 17 | sheet = workbook.sheet_by_index(sheet_index) |
| | 18 | |
| | 19 | #Make a list of the rows in which the first cell is merged |
| | 20 | merged_rows = [] |
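| | | #sheet.merged_cells yields (row_lo, row_hi, col_lo, col_hi) tuples, hi bounds exclusive |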
| | 21 | for crange in sheet.merged_cells: |
| | 22 | rlo, rhi, clo, chi = crange |
| | 23 | if clo == 0: |
| | 24 | merged_rows.append(rlo) |
| | 25 | |
| | 26 | |
| | 27 | #Simplify Formatting, rename column headers |
| | 28 | rows = [] |
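| | | #rows[0] will hold the (renamed) header row; rows[1] is reused as the current data row |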
| | 29 | rows.append(sheet.row(header_row-1)) |
| | 30 | |
| | 31 | for i in xrange(header_row,sheet.nrows): |
| | 32 | #find the first data row whose first cell is not merged |
| | 33 | if i not in merged_rows: |
| | 34 | rows.append(sheet.row(i)) |
| | 35 | break |
| | 36 | else: |
| | 37 | #for-else: every row after the header had its first cell merged |
| | 38 | #should add error checking code here |
| | 39 | pass |
| | 40 | |
| | 41 | #rename column headers for columns with the same name |
| | 42 | for key,value in append_column_names.items(): |
| | 43 | rows[0][key].value = value + rows[0][key].value |
| | 44 | |
| | 45 | #Cut Columns |
| | 46 | for i in [0,1]: |
| | 47 | for j in reversed(cut_columns): |
| | 48 | del rows[i][j] |
| | 49 | rows[i] = rows[i][:num_columns] |
| | 50 | |
| | 51 | #Get the headers for the field names |
| | 52 | row_string = "" |
| | 53 | fields = [] |
| | 54 | #There is a more efficient way of doing this |
| | 55 | for i in xrange(0,len(rows[1])): |
| | 56 | #Clean the field name |
| | 57 | field_name = rows[0][i].value.strip() |
| | 58 | field_name = field_name.replace(" ","_") |
| | 59 | field_name = field_name.replace(".","") |
| | 60 | field_name = field_name.replace("/","") |
| | 61 | field_name = field_name.replace("-","_") |
| | 62 | field_name = field_name.replace("(","_") |
| | 63 | field_name = field_name.replace(")","_") |
| | 64 | field_name = field_name.replace(",","_") |
| | 65 | field_name = field_name.lower() |
| | 66 | rows[0][i].value = field_name |
| | 67 | |
| | 68 | #store strings and numbers as strings; dates get their own field type |
| | 69 | field_type = "string" |
| | 70 | if rows[1][i].ctype == 3: |
| | 71 | field_type = "date" |
| | 72 | |
| | 73 | #print "Adding Field #" + str(i) + " Name:" + field_name |
| | 74 | fields.append(db.Field(field_name , field_type)) |
| | 75 | |
| | 76 | #This presumes that if the temp table is in the DB, it will have the same format, and the records can be appended |
| | 77 | if table not in db: |
| | 78 | db.define_table(table, migrate=True, *fields) |
| | 79 | if truncate_table: |
| | 80 | db[table].truncate() |
| | 81 | #db.commit() |
| | 82 | |
| | 83 | row_string = "" |
| | 84 | fields = {} |
| | 85 | |
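| | | #walk the remaining sheet rows, reusing rows[1] as the current data row |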
| | 86 | for i in xrange(header_row,sheet.nrows): |
| | 87 | #Get a new row of data |
| | 88 | del rows[1] |
| | 89 | rows.append(sheet.row(i)) |
| | 90 | #print rows |
| | 91 | |
| | 92 | #If the first 3 cells are empty, this is the end of the data |
| | 93 | if rows[1][0].value == '' and rows[1][1].value == '' and rows[1][2].value == '': |
| | 94 | break |
| | 95 | |
| | 96 | #if the first cell of this row is merged, skip the entire row |
| | 97 | if i in merged_rows: |
| | 98 | continue |
| | 99 | |
| | 100 | #Cut Columns |
| | 101 | for j in reversed(cut_columns): |
| | 102 | del rows[1][j] |
| | 103 | rows[1] = rows[1][:num_columns] |
| | 104 | |
| | 105 | for j in xrange(0, len(rows[0]) ): |
| | 106 | cell_value = rows[1][j].value |
| | 107 | |
| | 108 | #print str(rows[0][j].value) + ":" + str(cell_value) |
| | 109 | |
| | 110 | if rows[1][j].ctype == 1: # String |
| | 111 | cell_value = cell_value.replace("," , "") # get rid of commas |
| | 112 | cell_value = cell_value.replace("'" , "") # get rid of apostrophes |
| | 113 | cell_value = cell_value.replace(u'\xa0' , "") # get rid of non-breaking spaces |
| | 114 | |
| | 115 | #remove a literal '\x' escape and the 2 characters that follow it |
| | 116 | index = cell_value.find(r'\x') |
| | 117 | if index <> -1: # '\x' found |
| | 118 | #print "removing messy chars: " + cell_value[index:index+4] |
| | 119 | cell_value = cell_value[:index] + cell_value[index+4:] # strings are immutable - rebuild without the escape |
| | 120 | |
| | 121 | elif rows[1][j].ctype == 3: # Date |
| | 122 | #Convert Date |
| | 123 | date_tuple = xlrd.xldate_as_tuple(cell_value,0) |
| | 124 | date = datetime.date(*date_tuple[:3]) |
| | 125 | cell_value = date.isoformat() |
| | 126 | else: # not Date |
| | 127 | cell_value = str(cell_value) |
| | 128 | |
| | 129 | if db[table][str(rows[0][j].value)].type == "date" and rows[1][j].ctype <> 3: #Date |
| | 130 | if cell_value <> "" and cell_value <> "-" and cell_value <> "?": |
| | 131 | #This should print errors to a file |
| | 132 | #print "Date Error: '" + cell_value + "' (" + str(i) + "," + str(j) |
| | 133 | pass |
| | 134 | cell_value = None |
| | 135 | |
| | 136 | if cell_value == '' or cell_value == '-': |
| | 137 | cell_value = None |
| | 138 | |
| | 139 | #field:value dictionary pair |
| | 140 | fields[str(rows[0][j].value)] = cell_value |
| | 141 | |
| | 142 | #print fields |
| | 143 | |
| | 144 | db[table].insert(**fields) |
| | 145 | |
| | 146 | db.commit() |
| | 147 | |
| | 148 | return |
| | 149 | |
| | 150 | def add_lookup_data(lookup_table, |
| | 151 | lookup_field, |
| | 152 | **args): |
| | 153 | """ |
| | 154 | args: |
| | 155 | lookup_table - string - the table to which the data is being added |
| | 156 | lookup_field - string - the field in the lookup_table to which the new data is being added |
| | 157 | lookup_field_string_size - int - restriction on the length of the string |
| | 158 | lookup_field_is_int - validates the lookup value as an int |
| | 159 | lookup_field_alt - string - an alternative field for the new data to be added; this field should be a string |
| | 160 | source_data - list of dict(table, field) - the (temporary) table(s) from which the imported data is read |
| | 161 | """ |
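| | | #distinct values from the source column(s) become rows in lookup_table; each source |
| | | #table also gains a <field>_<lookup_table>_id column pointing back at the new row |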
| | 162 | db[lookup_table].truncate() |
| | 163 | if lookup_table == "pr_person": |
| | 164 | db.pr_pentity.truncate() |
| | 165 | |
| | 166 | read_table = args["source_data"][0]["table"] |
| | 167 | read_field = args["source_data"][0]["field"] |
| | 168 | |
| | 169 | #If >1 source tables, create a temporary table |
| | 170 | if len(args["source_data"]) > 1: |
| | 171 | read_table = "temp" |
| | 172 | read_field = lookup_field |
| | 173 | try: |
| | 174 | db.executesql("DROP TABLE temp") |
| | 175 | except: |
| | 176 | pass |
| | 177 | |
| | 178 | db.executesql("CREATE TABLE temp( " + lookup_field+ " string)") |
| | 179 | for source in args["source_data"]: |
| | 180 | db.executesql("INSERT INTO temp(" + lookup_field+ ") SELECT DISTINCT " + source["table"] + "." + source["field"] + " FROM " + source["table"]) |
| | 181 | #print "INSERT INTO temp(" + lookup_field+ ") SELECT DISTINCT " + source["table"] + "." + source["field"] + " FROM " + source["table"] |
| | 182 | |
| | 183 | #Create additional columns to store IDs for the lookup data in the source tables |
| | 184 | for source in args["source_data"]: |
| | 185 | try: |
| | 186 | db.executesql("ALTER TABLE " + source["table"] + " ADD " + source["field"] + "_" + lookup_table + "_id integer") |
| | 187 | #print "ID field added ok" |
| | 188 | except: |
| | 189 | pass |
| | 190 | |
| | 191 | unique_i = 1 |
| | 192 | |
| | 193 | #print db.executesql("SELECT DISTINCT " + read_field + " FROM " + read_table + " ORDER BY " + read_field) |
| | 194 | |
| | 195 | for data in db.executesql("SELECT DISTINCT " + read_field + " FROM " + read_table): |
| | 196 | if data[0] == "NULL" or data[0] == None: |
| | 197 | continue |
| | 198 | |
| | 199 | if lookup_table == "pr_person": |
| | 200 | person_split = data[0].split(" ") |
| | 201 | #pad the end, so that there are always three names |
| | 202 | for i in xrange(len(person_split), 3): |
| | 203 | person_split.append("") |
| | 204 | |
| | 205 | #Each person needs an entity |
| | 206 | pr_pe_id = db.pr_pentity.insert(opt_pr_entity_type=1,label=None) |
| | 207 | new_id = db.pr_person.insert(pr_pe_id = pr_pe_id, |
| | 208 | pr_pe_label=None, |
| | 209 | preferred_name = data[0], |
| | 210 | first_name = person_split[0] , |
| | 211 | middle_name = person_split[1] , |
| | 212 | last_name = person_split[2]) |
| | 213 | |
| | 214 | #Data must be an Integer |
| | 215 | elif args.get("lookup_field_is_int",False): |
| | 216 | data_int = validate_int(data[0]) |
| | 217 | |
| | 218 | if data_int <> None: |
| | 219 | if db(db[lookup_table][lookup_field] == data_int).count() ==0: |
| | 220 | #print "data int: " + str(data_int) |
| | 221 | new_id = db[lookup_table].insert(**{lookup_field: data_int}) |
| | 222 | #print "data int added" |
| | 223 | else: |
| | 224 | unique_i = unique_i+1 # unique dummy value |
| | 225 | #print "non data int: " + data[0] + "unique_i: " + str(unique_i) |
| | 226 | new_id = db[lookup_table].insert(**{lookup_field: unique_i, args["lookup_field_alt"]:data[0]}) |
| | 227 | |
| | 228 | #There is a restriction on the length of the data string |
| | 229 | elif "lookup_field_string_size" in args: |
| | 230 | #FIX - Errors could occur if the cut data isn't unique |
| | 231 | if len(data[0]) > args["lookup_field_string_size"]: |
| | 232 | #cut the data down to the size limit and place the full string in the alternative field |
| | 233 | data_cut = data[0][:args["lookup_field_string_size"]] |
| | 234 | new_id = db[lookup_table].insert(**{lookup_field: data_cut, args["lookup_field_alt"]: data[0]}) |
| | 235 | else: |
| | 236 | new_id = db[lookup_table].insert(**{lookup_field: data[0]}) |
| | 237 | |
| | 238 | #The field is a string with no restriction |
| | 239 | else: |
| | 240 | new_id = db[lookup_table].insert(**{lookup_field: data[0]}) |
| | 241 | |
| | 242 | #Add Lookup ID to the source tables |
| | 243 | for source in args["source_data"]: |
| | 244 | #print "UPDATE " + source["table"] + " SET " + source["table"] + "_" + lookup_table + "_id = " + str(new_id) + " WHERE " + source["field"] + " = '" + str(data[0]) + "'" |
| | 245 | db.executesql("UPDATE " + source["table"] + " SET " + source["field"] + "_" + lookup_table + "_id = " + str(new_id) + " WHERE " + source["field"] + " = '" + str(data[0]) + "'") |
| | 246 | |
| | 247 | try: |
| | 248 | db.executesql("DROP TABLE temp") |
| | 249 | except: |
| | 250 | pass |
| | 251 | |
| | 252 | return |
| | 253 | |
| | 254 | def format_price(price): |
| | 255 | |
| | 256 | currency_map = dict( FEC= 'FEC', MMK = 'MMK', kyat= 'MMK', USD= 'USD', Kyats = 'MMK', THB = 'THB', GBP = 'GBP', USUSD = 'USD', Bath = 'THB', Kyat = 'MMK') |
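| | | #keys are the spellings found in the spreadsheets, values are fin_currency.name values |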
| | 257 | |
| | 258 | #lookup currency - this code is duplicated |
| | 259 | if price <> None: |
| | 260 | #print "Value: " + str(price) |
| | 261 | try: |
| | 262 | price = float(price) |
| | 263 | currency_id = db(db.fin_currency.name == "MMK").select(db.fin_currency.id)[0]['id'] |
| | 264 | except: |
| | 265 | #print "price except: " + str(price) |
| | | currency_id = None # default if no known currency code is found below |
| | 266 | price = price.replace("$","USD") |
| | 267 | #price = price.replace("£","GBP") Error |
| | 268 | price = price.replace(" ","") |
| | 269 | #print currency_map |
| | 270 | for key, data in currency_map.iteritems(): |
| | 271 | if price.find(key) <> -1: |
| | 272 | price = price.replace(key, "") |
| | 273 | #print "price: " + price + " Key: " + key |
| | 274 | currency_id = db(db.fin_currency.name == data).select(db.fin_currency.id)[0]['id'] |
| | 275 | try: |
| | 276 | price = float(price) |
| | 277 | except: |
| | 278 | #print "OT Price ERROR!!! '" + str(price) + "'" |
| | 279 | pass |
| | 280 | break |
| | 281 | else: |
| | 282 | currency_id = None |
| | 283 | |
| | 284 | return price, currency_id |
| | 285 | |
| | 286 | def validate_date(date_str): |
| | | import time |
| | 287 | if date_str <> None and date_str <> "": |
| | 288 | date_str = date_str.replace("NA","") |
| | 289 | try: |
| | 290 | time.strptime(date_str,"%Y-%m-%d") |
| | 291 | except: |
| | 292 | #print "ERROR Date: " + str(date_str) |
| | 293 | date_str = None |
| | 294 | else: |
| | 295 | date_str = None |
| | 296 | return date_str |
| | 297 | |
| | 298 | def validate_int(int_str): |
| | 299 | return_int = None |
| | 300 | if int_str <> None: |
| | 301 | try: |
| | 302 | return_int = int(float(int_str)) # trim any decimal part |
| | 303 | except: |
| | 304 | pass |
| | 305 | return return_int |
| | 306 | |
| | 307 | def find_key(dic, val): |
| | 308 | """return the key of dictionary dic given the value""" |
| | 309 | return [k for k, v in dic.iteritems() if v == val][0] |
| | 312 | |
| | 313 | |
| | 314 | |
| | 315 | def import_excel_data(): |
| | 316 | |
| | 317 | import time |
| | 318 | |
| | 319 | #print "--------------------------------------------------------------------------" |
| | 320 | |
| | 321 | #OT |
| | 322 | cut_columns = [5] |
| | 323 | for i in xrange(19,32): |
| | 324 | cut_columns.append(i) |
| | 325 | |
| | 326 | cut_columns.append(41) |
| | 327 | |
| | 328 | append_column_names= {42:"a_",43:"a_",44:"a_",45:"b_",46:"b_",47:"b_",48:"c_",49:"c_",50:"c_"} |
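| | | #keys are 0-based column indices into the raw header row (before cut_columns is applied); |
| | | #the prefix keeps otherwise-duplicate column names unique |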
| | 329 | #import_excel_sheet(table, filename, num_columns, sheet_index=0, header_row=1, truncate_table=True, cut_columns=(), append_column_names={}): |
| | 330 | |
| | 331 | import_excel_sheet( "ot_temp", |
| | 332 | r'/tmp/import/ot.xls', |
| | 333 | 39, |
| | 334 | 2, |
| | 335 | 5, |
| | 336 | True, |
| | 337 | cut_columns, |
| | 338 | append_column_names) |
| | 339 | |
| | 340 | #ASSET |
| | 341 | append_column_names= {18:"cost_",20:"cost_",26:"sale_",27:"sale_"} |
| | 342 | import_excel_sheet( "asset_temp", |
| | 343 | r'/tmp/import/asset.xls', |
| | 344 | 31, |
| | 345 | 0, |
| | 346 | 2, |
| | 347 | True, |
| | 348 | [], |
| | 349 | append_column_names) |
| | 350 | |
| | 351 | #CATALOG |
| | 352 | for i in xrange(1,8): |
| | 353 | if i == 1: |
| | 354 | truncate_table=True |
| | 355 | num_columns = 12 #for sub_cat |
| | 356 | else: |
| | 357 | truncate_table=False |
| | 358 | num_columns = 11 |
| | 359 | import_excel_sheet( "catalog_temp", |
| | 360 | r'/tmp/import/catalog.xls', |
| | 361 | num_columns, |
| | 362 | i, |
| | 363 | 3, |
| | 364 | truncate_table) |
| | 365 | |
| | 366 | #CATEGORIES |
| | 367 | import_excel_sheet( table = "cat_temp", |
| | 368 | filename = r'/tmp/import/cat.xls', |
| | 369 | num_columns = 3, |
| | 370 | header_row=1, |
| | 371 | truncate_table=True) |
| | 372 | |
| | 373 | import_excel_sheet( table = "cat_map_temp", |
| | 374 | filename = r'/tmp/import/cat_map.xls', |
| | 375 | num_columns = 4, |
| | 376 | header_row=1, |
| | 377 | truncate_table=True) |
| | 378 | |
| | 379 | #UPDATE LOOKUPS |
| | 380 | #Fix the IDs for these in dictionaries (with the string from the orig table as the key) |
| | 381 | #Could these be functions? |
| | 382 | #people |
| | 383 | add_lookup_data( lookup_table = "pr_person", |
| | 384 | lookup_field = "name", |
| | 385 | source_data = [{"table": "ot_temp", "field": "order_requestor"}, |
| | 386 | {"table": "ot_temp", "field": "received_by"}, |
| | 387 | {"table": "asset_temp", "field": "person_responsible"} ] |
| | 388 | ) |
| | 389 | |
| | 390 | #code |
| | 391 | add_lookup_data( lookup_table = "fin_code", |
| | 392 | lookup_field = "name", |
| | 393 | lookup_field_is_int = True, |
| | 394 | lookup_field_alt = "comments", |
| | 395 | source_data = [{"table": "asset_temp", "field": "account_code"}] |
| | 396 | ) |
| | 397 | |
| | 398 | #contract |
| | 399 | add_lookup_data( lookup_table = "fin_contract", |
| | 400 | lookup_field = "name", |
| | 401 | source_data = [{"table": "asset_temp", "field": "contract"}], |
| | 402 | lookup_field_string_size = 6, |
| | 403 | lookup_field_alt = "comments" |
| | 404 | ) |
| | 405 | |
| | 406 | #project |
| | 407 | add_lookup_data( lookup_table = "fin_project", |
| | 408 | lookup_field = "name", |
| | 409 | source_data = [{"table": "asset_temp", "field": "project_id"}], |
| | 410 | lookup_field_string_size = 6, |
| | 411 | lookup_field_alt = "comments" |
| | 412 | ) |
| | 413 | #unit |
| | 414 | add_lookup_data( lookup_table = "mat_unit", |
| | 415 | lookup_field = "name", |
| | 416 | source_data = [{"table": "ot_temp", "field": "unit"}, |
| | 417 | {"table": "catalog_temp", "field": "unit"} ] |
| | 418 | ) |
| | 419 | |
| | 420 | #supplier |
| | 421 | add_lookup_data( lookup_table = "mat_supplier", |
| | 422 | lookup_field = "name", |
| | 423 | source_data = [{"table": "ot_temp", "field": "sourced_from__supplier_name"}, |
| | 424 | {"table": "ot_temp", "field": "a_supplier"}, |
| | 425 | {"table": "ot_temp", "field": "b_supplier"}, |
| | 426 | {"table": "ot_temp", "field": "c_supplier"}, |
| | 427 | {"table": "catalog_temp", "field": "supplier"} ] |
| | 428 | ) |
| | 429 | |
| | 430 | #category |
| | 431 | try: |
| | 432 | db.executesql("ALTER TABLE cat_temp ADD mat_category_id integer") |
| | 433 | except: |
| | 434 | pass |
| | 435 | |
| | 436 | try: |
| | 437 | db.executesql("ALTER TABLE cat_map_temp ADD mat_category_id integer") |
| | 438 | except: |
| | 439 | pass |
| | 440 | |
| | 441 | try: |
| | 442 | db.executesql("ALTER TABLE cat_map_temp ADD mat_sub_category_id integer") |
| | 443 | except: |
| | 444 | pass |
| | 445 | |
| | 446 | #Add categories |
| | 447 | db.mat_category.truncate() |
| | 448 | for data in db.executesql("SELECT DISTINCT category_name, category_description FROM cat_temp"): |
| | 449 | new_id = db.mat_category.insert(name= data[0], description= data[1]) |
| | 450 | db.executesql("UPDATE cat_temp SET mat_category_id = " + str(new_id) + " WHERE category_name = '" + str(data[0]) + "'") |
| | 451 | |
| | 452 | #Add Sub-categories |
| | 453 | db.mat_sub_category.truncate() |
| | 454 | for category_name, sub_category, mat_category_id in db.executesql("SELECT DISTINCT category_name, sub_category, mat_category_id FROM cat_temp"): |
| | 455 | new_id = db.mat_sub_category.insert(name= sub_category, category_id = mat_category_id) |
| | 456 | |
| | 457 | #Set mat_category_id and mat_sub_category_id in cat_map_temp table |
| | 458 | db.executesql("UPDATE cat_map_temp SET mat_sub_category_id = " + str(new_id) + ", mat_category_id = " + str(mat_category_id) + |
| | 459 | " WHERE category_name = '" + category_name + "' AND sub_category = '" + sub_category + "'") |
| | 460 | |
| | 461 | #item |
| | 462 | db.mat_item.truncate() |
| | 463 | try: |
| | 464 | db.executesql("ALTER TABLE catalog_temp ADD mat_item_id integer") |
| | 465 | except: |
| | 466 | pass |
| | 467 | |
| | 468 | currency_map = dict( FEC= 'FEC', MMK = 'MMK', kyat= 'MMK', USD= 'USD', Kyats = 'MMK', THB = 'THB', GBP = 'GBP', USUSD = "USD", Bath = 'THB', Kyat = 'MMK') |
| | 469 | |
| | 470 | #Add All of the Items from the catalog (because this has the most complete details) |
| | | #defaults, in case the first catalog row has no category mapping |
| | | cat_map_row = [] |
| | | category_id = None |
| | | sub_category_id = None |
| | 471 | for name, \ |
| | 472 | sub_category, \ |
| | 473 | category, \ |
| | 474 | unit_id, \ |
| | 475 | price, \ |
| | 476 | currency, \ |
| | 477 | supplier_id, \ |
| | 478 | comments \ |
| | 479 | in db.executesql("SELECT DISTINCT description__item, " + |
| | 480 | "sub_category," + |
| | 481 | "cat," + |
| | 482 | "unit_mat_unit_id, " + |
| | 483 | "price, " + |
| | 484 | "currency, " + |
| | 485 | "supplier_mat_supplier_id, " + |
| | 486 | "remarks__quality__delay__discount_if_big_amount " + |
| | 487 | "FROM catalog_temp"): |
| | 488 | |
| | 489 | if sub_category <> None: |
| | 490 | #print "sub category: " + str(sub_category) |
| | 491 | #print "SELECT mat_category_id, mat_sub_category_id FROM cat_map_temp WHERE key = '" + str(sub_category) + "'" |
| | 492 | cat_map_row = db.executesql("SELECT mat_category_id, mat_sub_category_id FROM cat_map_temp WHERE key = '" + str(sub_category) + "'") |
| | 493 | #print "cat_map_row length: " + str(len(cat_map_row)) |
| | 494 | #print cat_map_row |
| | 495 | elif category <> None: |
| | 496 | #print "category: " + str(category) |
| | 497 | cat_map_row = db.executesql("SELECT mat_category_id, mat_sub_category_id FROM cat_map_temp WHERE key = '" + str(category) + "'") |
| | 498 | #print "cat_map_row length: " + str(len(cat_map_row)) |
| | 499 | #print cat_map_row |
| | 500 | else: |
| | 501 | #print "Item: " + name |
| | 502 | #print "NO CATEGORY LISTED!!!" |
| | 503 | pass |
| | 504 | |
| | 505 | if len(cat_map_row) >= 1: |
| | 506 | category_id = cat_map_row[0][0] |
| | 507 | sub_category_id = cat_map_row[0][1] |
| | 508 | |
| | 509 | if currency <> None: |
| | 510 | #print " Currency: " + str(currency) |
| | 511 | currency = currency.replace("$","USD") |
| | 512 | currency_id = db(db.fin_currency.name == currency_map[currency]).select(db.fin_currency.id)[0]['id'] |
| | 513 | else: |
| | 514 | currency_id =None |
| | 515 | |
| | 516 | if price <> None: |
| | 517 | try: |
| | 518 | unit_cost = float(price) |
| | 519 | except: |
| | 520 | #print "Catalog Price ERROR!!! '" + str(price) + "'" |
| | 521 | pass |
| | 522 | else: |
| | 523 | unit_cost = None |
| | 524 | |
| | 525 | new_id = db.mat_item.insert(name= name, |
| | 526 | category_id = category_id, |
| | 527 | sub_category_id = sub_category_id, |
| | 528 | unit_id = unit_id, |
| | 529 | unit_cost = unit_cost, |
| | 530 | currency_id = currency_id, |
| | 531 | supplier_id = supplier_id, |
| | 532 | comments = comments |
| | 533 | ) |
| | 534 | |
| | 535 | #Record the new mat_item_id back in the catalog_temp table |
| | 536 | db.executesql("UPDATE catalog_temp SET mat_item_id = " + str(new_id) + " WHERE description__item = '" + name + "'" ) |
| | 537 | |
| | 538 | #Add Items from Asset |
| | 539 | try: |
| | 540 | db.executesql("ALTER TABLE asset_temp ADD mat_item_id integer") |
| | 541 | except: |
| | 542 | pass |
| | 543 | |
| | 544 | try: |
| | 545 | db.executesql("ALTER TABLE asset_temp ADD category_id integer") |
| | 546 | except: |
| | 547 | pass |
| | 548 | |
| | 549 | try: |
| | 550 | db.executesql("ALTER TABLE asset_temp ADD sub_category_id integer") |
| | 551 | except: |
| | 552 | pass |
| | 553 | |
| | 554 | for name, \ |
| | 555 | category, \ |
| | 556 | price, \ |
| | 557 | currency, \ |
| | 558 | comments \ |
| | 559 | in db.executesql("SELECT DISTINCT description, " + |
| | 560 | "category," + |
| | 561 | "cost_amount, " + |
| | 562 | "asset_temp.cost_in_local_currency, " + |
| | 563 | "comments " + |
| | 564 | "FROM asset_temp"): |
| | 565 | |
| | 566 | if category <> None: |
| | 567 | cat_map_row = db.executesql("SELECT mat_category_id, mat_sub_category_id FROM cat_map_temp WHERE key = '" + str(category) + "'") |
| | 568 | else: |
| | 569 | cat_map_row = [] |
| | 570 | |
| | 571 | sub_category_id = 0 #default, so the later sub_category_id <> 0 check works when there is no mapping |
| | 572 | if len(cat_map_row) >= 1: |
| | 573 | category_id = cat_map_row[0][0] |
| | | sub_category_id = cat_map_row[0][1] |
| | 574 | |
| | 575 | #write these back to the Asset_temp table |
| | 576 | db.executesql("UPDATE asset_temp SET category_id = " + str(category_id) + ", sub_category_id = " + str(sub_category_id) + " WHERE description = '" + str(name) + "' AND category = '" + str(category) + "'") |
| | 577 | #else: |
| | 578 | #print "cat_map_row length: " + str(len(cat_map_row)) |
| | 579 | #print str(name) + "cat " + str(category) |
| | 580 | |
| | 581 | if currency <> None: |
| | 582 | #print " Currency: " + str(currency) |
| | 583 | currency = currency.replace("$","USD") |
| | 584 | currency_id = db(db.fin_currency.name == currency_map[currency]).select(db.fin_currency.id)[0]['id'] |
| | 585 | |
| | 586 | else: |
| | 587 | currency_id =None |
| | 588 | |
| | 589 | if price <> None: |
| | 590 | try: |
| | 591 | unit_cost = float(price) |
| | 592 | except: |
| | 593 | #print "Asset Price ERROR!!! '" + str(price) + "'" |
| | 594 | pass |
| | 595 | |
| | 596 | else: |
| | 597 | unit_cost = None |
| | 598 | |
| | 599 | current_item_rows = db(db.mat_item.name == name).select(db.mat_item.id,db.mat_item.name, |
| | 600 | db.mat_item.category_id, |
| | 601 | db.mat_item.sub_category_id, |
| | 602 | db.mat_item.unit_cost, |
| | 603 | db.mat_item.currency_id) |
| | 604 | |
| | 605 | if len(current_item_rows) == 0: |
| | 606 | #Add Item |
| | 607 | item_id = db.mat_item.insert(name= name, |
| | 608 | category_id = category_id, |
| | 609 | sub_category_id = sub_category_id, |
| | 610 | unit_cost = unit_cost, |
| | 611 | currency_id = currency_id, |
| | 612 | comments = comments |
| | 613 | ) |
| | 614 | else: |
| | 615 | #test to see if any additional information can be added |
| | 616 | current_item = current_item_rows[0] |
| | 617 | |
| | 618 | item_id = current_item['id'] |
| | 619 | |
| | 620 | if current_item['sub_category_id'] == None and sub_category_id <> 0: |
| | 621 | db(db.mat_item.name == name).update(category_id = category_id, sub_category_id = sub_category_id) |
| | 622 | #db(db.mat_item.name == name).update(db.mat_item.sub_category_id = sub_category_id) |
| | 623 | |
| | 624 | #don't change the currency if there is a cost but no currency |
| | 625 | if current_item['unit_cost'] == None and current_item['currency_id'] == None and currency_id <> None: |
| | 626 | db(db.mat_item.name == name).update(currency_id = currency_id) |
| | 627 | |
| | 628 | if current_item['unit_cost'] == None and unit_cost <> None: |
| | 629 | db(db.mat_item.name == name).update(unit_cost = unit_cost) |
| | 630 | |
| | 631 | #Record the mat_item_id back in the asset_temp table |
| | 632 | db.executesql("UPDATE asset_temp SET mat_item_id = " + str(item_id) + " WHERE description = '" + name + "'" ) |
| | 633 | |
| | 634 | #Add Items from OT |
| | 635 | try: |
| | 636 | db.executesql("ALTER TABLE ot_temp ADD mat_item_id integer") |
| | 637 | except: |
| | 638 | pass |
| | 639 | |
| | 640 | for name, \ |
| | 641 | unit_id, \ |
| | 642 | supplier_id, \ |
| | 643 | price \ |
| | 644 | in db.executesql("SELECT DISTINCT description, " + |
| | 645 | "unit_mat_unit_id," + |
| | 646 | "sourced_from__supplier_name_mat_supplier_id," + |
| | 647 | "actual_unit_price_in_currency " + |
| | 648 | "FROM ot_temp"): |
| | 649 | |
| | 650 | if name == None: |
| | 651 | continue # skip this line |
| | 652 | |
| | 653 | unit_cost, currency_id = format_price(price) |
| | 654 | |
| | 655 | current_item_rows = db(db.mat_item.name == name).select(db.mat_item.id,db.mat_item.name, |
| | 656 | db.mat_item.unit_id, |
| | 657 | db.mat_item.unit_cost, |
| | 658 | db.mat_item.currency_id, |
| | 659 | db.mat_item.supplier_id) |
| | 660 | |
| | 661 | if len(current_item_rows) == 0: |
| | 662 | #Add Item |
| | 663 | item_id = db.mat_item.insert(name= name, |
| | 664 | unit_id = unit_id, |
| | 665 | unit_cost = unit_cost, |
| | 666 | currency_id = currency_id, |
| | 667 | supplier_id = supplier_id |
| | 668 | ) |
| | 669 | else: |
| | 670 | #test to see if any additional information can be added |
| | 671 | current_item = current_item_rows[0] |
| | 672 | |
| | 673 | item_id = current_item['id'] |
| | 674 | |
| | 675 | if current_item['unit_id'] == None and unit_id <> None: |
| | 676 | db(db.mat_item.name == name).update(unit_id = unit_id) |
| | 677 | |
| | 678 | #don't change the currency if there is a cost but no currency |
| | 679 | if current_item['unit_cost'] == None and current_item['currency_id'] == None and currency_id <> None: |
| | 680 | db(db.mat_item.name == name).update(currency_id = currency_id) |
| | 681 | |
| | 682 | if current_item['unit_cost'] == None and unit_cost <> None: |
| | 683 | db(db.mat_item.name == name).update(unit_cost = unit_cost) |
| | 684 | |
| | 685 | if current_item['supplier_id'] == None and supplier_id <> None: |
| | 686 | db(db.mat_item.name == name).update(supplier_id = supplier_id) |
| | 687 | |
| | 688 | #Record the mat_item_id back in the ot_temp table |
| | 689 | db.executesql("UPDATE ot_temp SET mat_item_id = " + str(item_id) + " WHERE description = '" + name + "'" ) |
| | 690 | |
| | 691 | #donor |
| | 692 | add_lookup_data( lookup_table = "asset_donor", |
| | 693 | lookup_field = "name", |
| | 694 | source_data = [{"table": "asset_temp", "field": "donor"}] |
| | 695 | ) |
| | 696 | |
| | 697 | #office |
| | 698 | add_lookup_data( lookup_table = "asset_office", |
| | 699 | lookup_field = "name", |
| | 700 | source_data = [{"table": "asset_temp", "field": "office"}], |
| | 701 | lookup_field_string_size = 3, |
| | 702 | lookup_field_alt = "comments" |
| | 703 | ) |
| | 704 | |
| | 705 | db.asset_office.insert( |
| | 706 | name = 'Y+D', |
| | 707 | comments = 'Yangon + Dedaye' |
| | 708 | ) |
| | 709 | db.asset_office.insert( |
| | 710 | name = 'BOG', |
| | 711 | comments = 'Bogale' |
| | 712 | ) |
| | 713 | db.asset_office.insert( |
| | 714 | name = 'DRY', |
| | 715 | comments = 'Dryzone' |
| | 716 | ) |
| | 717 | |
| | 718 | #location |
| | 719 | add_lookup_data( lookup_table = "asset_location", |
| | 720 | lookup_field = "name", |
| | 721 | source_data = [{"table": "asset_temp", "field": "location"}] |
| | 722 | ) |
| | 723 | |
| | 724 | #sub_location |
| | 725 | add_lookup_data( lookup_table = "asset_sub_location", |
| | 726 | lookup_field = "name", |
| | 727 | source_data = [{"table": "asset_temp", "field": "sub_location"}] |
| | 728 | ) |
| | 729 | |
| | 730 | #Assets |
| | 731 | db.asset_asset.truncate() |
| | 732 | |
| | 733 | #key = asset_asset fields, value = asset_temp fields |
| | 734 | #Used for storing the data to be inserted |
| | 735 | asset_insert_dict = dict(name = 'ref', |
| | 736 | date = 'date_of_purchase', |
| | 737 | financial_year = 'year', |
| | 738 | donor = 'donor_asset_donor_id', |
| | 739 | code = 'account_code_fin_code_id', |
| | 740 | contract = 'contract_fin_contract_id', |
| | 741 | project = 'project_id_fin_project_id', |
| | 742 | type = 'type', |
| | 743 | item_id = 'mat_item_id', |
| | 744 | category_id = 'category_id', |
| | 745 | sub_category_id = 'sub_category_id', |
| | 746 | person = 'person_responsible_pr_person_id', |
| | 747 | tag = 'asset_tag_no', |
| | 748 | serial = 'serial_no', |
| | 749 | office_id = 'office_asset_office_id', |
| | 750 | location = 'location_asset_location_id', |
| | 751 | sub_location = 'sub_location_asset_sub_location_id', |
| | 752 | po_no = 'po_no', |
| | 753 | purchase_currency = 'cost_in_local_currency', #LOOK UP REQ |
| | 754 | purchase_cost = 'cost_amount', |
| | 755 | purchase_exchange_rate = 'cost_mthly_exchange_rate', |
| | 756 | gbp_purchase_cost = 'cost_in_gbp', |
| | 757 | guarantee = 'date_of_guarantee__expiration', |
| | 758 | status = 'status', |
| | 759 | disposal = 'date_of_disposal', |
| | 760 | sale_currency = 'sale_value_in_local_currency', |
| | 761 | sale_value = 'sale_amount', |
| | 762 | sale_exchange_rate = 'sale_mthly_exchange_rate', |
| | 763 | gbp_sale_value = 'sale_value_gbp', |
| | 764 | donation = 'donation', |
| | 765 | comments = 'comments') |
| | 766 | |
| | 767 | #key = asset_temp fields, value = asset_asset fields |
| | 768 | #Used for mapping the asset_temp fields to the asset_asset fields |
| | 769 | asset_fields_dict = {} |
| | 770 | |
| | 771 | #list = asset_temp fields |
| | 772 | asset_fields = [] |
| | 773 | sql_str = "SELECT " |
| | 774 | |
| | 775 | for key,value in asset_insert_dict.iteritems(): |
| | 776 | asset_fields_dict[value] = key |
| | 777 | asset_fields.append(value) |
| | 778 | sql_str = sql_str + value + "," |
| | 779 | |
| | 780 | sql_str = sql_str[:-1] + " FROM asset_temp" |
| | 781 | |
| | 782 | #print sql_str |
| | 783 | |
| | 784 | type_map = { "str": "string", "unicode": "string", "int": "reference"} |
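| | | #maps the python type of each temp-table value onto a web2py field type, so a mismatch |
| | | #with the asset_asset field type triggers a conversion below |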
| | 785 | |
| | 786 | for asset_row in db.executesql(sql_str): |
| | 787 | for field, data in zip(asset_fields,asset_row): |
| | 788 | if field == 'cost_in_local_currency' and data <> None: # Map the currency to the lookup table |
| | 789 | data = data.replace("$","USD") |
| | 790 | data = db(db.fin_currency.name == currency_map[data]).select(db.fin_currency.id)[0]['id'] |
| | 791 | if type(data).__name__ <> "NoneType" and data <> None: |
| | 792 | #print "temp type: " + type(data).__name__ + "\tasset type: " + str(db.asset_asset[asset_fields_dict[field]].type) |
| | 793 | table_field_type = db.asset_asset[asset_fields_dict[field]].type[:9] # Cut to 9, so the different "reference" types can be treated the same |
| | 794 | if type_map[type(data).__name__] <> table_field_type: |
| | 795 | #try to convert the date |
| | 796 | if table_field_type == 'date': |
| | 797 | data = validate_date(data) |
| | 798 | |
| | 799 | #try to convert to float |
| | 800 | elif table_field_type == 'double': |
| | 801 | try: |
| | 802 | data = float(data) |
| | 803 | except: |
| | 804 | #print "ERROR double: " + str(data) |
| | 805 | #print "field: " + field + "\tdata: " + str(data) |
| | 806 | pass |
| | 807 | |
| | 808 | else: |
| | 809 | #print "ERROR other type" |
| | 810 | #print "type: " + table_field_type + "\tfield: " + field + "\tdata: " + str(data) |
| | 811 | pass |
| | 812 | asset_insert_dict[asset_fields_dict[field]] = data |
| | 813 | |
| | 814 | #print asset_insert_dict |
| | 815 | new_id = db.asset_asset.insert(**asset_insert_dict) |
| | 816 | |
| | 817 | #OT |
| | 818 | #add purchase_request |
| | 819 | db.ot_purchase_request.truncate() |
| | 820 | try: |
| | 821 | db.executesql("ALTER TABLE ot_temp ADD request_id integer") |
| | 822 | except: |
| | 823 | pass |
| | 824 | |
| | 825 | office_map = { "Yangon": "YGN" , "Dedaye": "DDY","Dedaya": "DDY", "Yangon + Dedaye": "Y+D", "Bogale": "BOG", "Dryzone": "DRY"} |
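| | | #maps the full office names found in the sheets onto the short codes used in asset_office |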
| | 826 | |
| | 827 | for name, \ |
| | 828 | date_received, \ |
| | 829 | requestor, \ |
| | 830 | office, \ |
| | 831 | date_processed, \ |
| | 832 | comments \ |
| | 833 | in db.executesql("SELECT DISTINCT req_no, " + |
| | 834 | "date_received," + |
| | 835 | "order_requestor_pr_person_id," + |
| | 836 | "location," + |
| | 837 | "date_processed," + |
| | 838 | "general_information_comments " + |
| | 839 | "FROM ot_temp" ): |
| | 840 | |
| | 841 | #convert office - this code is duplicated |
| | 842 | if office <> "" and office <> None: |
| | 843 | #print "office code:" + office_map[office] |
| | 844 | office_id = db(db.asset_office.name == office_map[office]).select(db.asset_office.id)[0]['id'] |
| | 845 | else: |
| | | office_id = None |
| | 846 | |
| | 847 | status = 2 #Closed - check that all the items are either completed or cancelled XXX check |
| | 848 | |
| | 849 | #Check Dates |
| | 850 | date_received = validate_date(date_received) |
| | 851 | date_processed = validate_date(date_processed) |
| | 852 | |
| | 853 | #Insert request |
| | 854 | request_id = db.ot_purchase_request.insert(name= name, |
| | 855 | date_received = date_received, |
| | 856 | requestor = requestor, |
| | 857 | office_id = office_id, |
| | 858 | date_processed = date_processed, |
| | 859 | comments = comments, |
| | 860 | status = status |
| | 861 | ) |
| | 862 | |
| | 863 | #update request_id in temp table |
| | 864 | #print "UPDATE ot_temp SET request_id = " + str(request_id) + " WHERE req_no = " + name |
| | 865 | db.executesql("UPDATE ot_temp SET request_id = " + str(request_id) + " WHERE req_no = '" + name + "'") |
| | 866 | |
| | 867 | #add order |
| | 868 | db.ot_order.truncate() |
| | 869 | try: |
| | 870 | db.executesql("ALTER TABLE ot_temp ADD order_id integer") |
| | 871 | except: |
| | 872 | pass |
| | 873 | |
| | 874 | for name, \ |
| | 875 | supplier_id \ |
| | 876 | in db.executesql("SELECT DISTINCT purchase_order_no, " + |
| | 877 | "sourced_from__supplier_name_mat_supplier_id " + |
| | 878 | "FROM ot_temp"): |
| | 879 | |
| | 880 | status = 2 #Closed - check that all the items are either completed or cancelled XXX check |
| | 881 | |
| | 882 | #if there is a PO# |
| | 883 | if name <> "-" and name <> None: |
| | 884 | #Insert Purchase |
| | 885 | order_id = db.ot_order.insert(name= name, supplier_id = supplier_id, status = status ) |
| | 886 | #update order_id in temp table |
| | 887 | db.executesql("UPDATE ot_temp SET order_id = " + str(order_id) + " WHERE purchase_order_no = '" + name + "'") |
| | 888 | |
| | 889 | #Loop through all the orders with no PO# (but only add if there is a supplier) |
| | 890 | i = 0 |
| | 891 | for name, supplier_id in db.executesql("SELECT purchase_order_no, " + |
| | 892 | "sourced_from__supplier_name_mat_supplier_id " + |
| | 893 | "FROM ot_temp WHERE order_id Is Null"): |
| | 894 | if supplier_id <> None: |
| | 895 | if name == "-" or name == None: #no real PO number - generate a dummy one |
| | 896 | name = "AUTO_PO_REF" + str(i) |
| | 897 | i = i+1 |
| | 898 | #print "Adding dummy PO#" + name |
| | 899 | #Insert Purchase |
| | 900 | order_id = db.ot_order.insert(name= name, supplier_id = supplier_id, status = status ) |
| | 901 | #update order_id in temp table |
| | 902 | db.executesql("UPDATE ot_temp SET order_id = " + str(order_id) + " WHERE purchase_order_no = '" + name + "'" ) |
| | 903 | else: |
| | 904 | #print "Error these PO shouldn't include those with numbers Name: " + str(name) |
| | 905 | pass |
| | 906 | |
| | 907 | #add invoice |
| | 908 | db.fin_invoice.truncate() |
| | 909 | try: |
| | 910 | db.executesql("ALTER TABLE ot_temp ADD invoice_id integer") |
| | 911 | except: |
| | 912 | pass |
| | 913 | |
| | 914 | for id, \ |
| | 915 | order_id, \ |
| | 916 | date_in, \ |
| | 917 | date_out, \ |
| | 918 | value, \ |
| | 919 | comments \ |
| | 920 | in db.executesql("SELECT id, order_id, date_supplier_invoice_received, date_paid_by_finance, amount_paid, finance_comments FROM ot_temp"): |
| | 921 | |
| | 922 | date_in = validate_date(date_in) |
| | 923 | date_out = validate_date(date_out) |
| | 924 | |
| | 925 | #lookup currency - this code is duplicated |
| | 926 | value, currency_id = format_price(value) |
| | 927 | |
| | 928 | if value <> None: |
| | 929 | #Insert Invoice (if there is a value) |
| | 930 | invoice_id = db.fin_invoice.insert(order_id= order_id, |
| | 931 | date_in =date_in, |
| | 932 | date_out = date_out, |
| | 933 | value = value, |
| | 934 | comments = comments ) |
| | 935 | |
| | 936 | #print "Item id: " + str(id) + "\tinvoice_ id: " + str(invoice_id) |
| | 937 | |
| | 938 | db.executesql("UPDATE ot_temp SET invoice_id = " + str(invoice_id) + " WHERE id = " + str(id) ) |
| | 939 | |
| | 940 | #add grn |
| | 941 | db.ot_grn.truncate() |
| | 942 | |
| | 943 | try: |
| | 944 | db.executesql("ALTER TABLE ot_temp ADD grn_id integer") |
| | 945 | except: |
| | 946 | pass |
| | 947 | |
| | 948 | i=0 |
| | 949 | for id, \ |
| | 950 | name, \ |
| | 951 | date_received, \ |
| | 952 | received_by, \ |
| | 953 | office \ |
| | 954 | in db.executesql("SELECT id, grn_no, date_arrived_in_location, received_by_pr_person_id, location FROM ot_temp"): |
| | 955 | |
| | 956 | date_received = validate_date(date_received) |
| | 957 | |
| | 958 | #convert office |
| | 959 | if office <> "" and office <> None: |
| | 960 | #print "Office: " + str(office) |
| | 961 | office_id = db(db.asset_office.name == office_map[office]).select(db.asset_office.id)[0]['id'] |
| | | else: |
| | | office_id = None |
| | 962 | |
| | 963 | #If there is GRN Data |
| | 964 | if name <> None or date_received <> None or received_by <> None: |
| | 965 | name = "AUTO_GRN_REF" + str(i) |
| | 966 | i = i+1 |
| | 967 | |
| | 968 | #Insert GRN |
| | 969 | grn_id = db.ot_grn.insert(name= name, |
| | 970 | date_received =date_received, |
| | 971 | received_by = received_by, |
| | 972 | office_id = office_id ) |
| | 973 | db.executesql("UPDATE ot_temp SET grn_id = " + str(grn_id) + " WHERE id = " + str(id) ) |
| | 974 | |
| | 975 | #add waybills |
| | 976 | db.ot_waybill.truncate() |
| | 977 | i=0 |
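| | | #the ot sheet holds up to three despatches per line in column groups prefixed a_, b_ and c_ |
| | | #(see append_column_names above); each group becomes its own waybill |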
| | 978 | for index in ('a','b','c'): |
| | 979 | |
| | 980 | try: |
| | 981 | db.executesql("ALTER TABLE ot_temp ADD waybill_id_" + index + " integer") |
| | 982 | except: |
| | 983 | pass |
| | 984 | |
| | 985 | |
| | 986 | for id, \ |
| | 987 | date_dispatched, \ |
| | 988 | supplier_id, \ |
| | 989 | despatched_qty \ |
| | 990 | in db.executesql("SELECT id, " + index + "_date, " + index + "_supplier_mat_supplier_id, " + index + "_despatched_qty FROM ot_temp"): |
| | 991 | |
| | 992 | date_dispatched = validate_date(date_dispatched) |
| | 993 | |
| | 994 | #If there is WB Data |
| | 995 | if date_dispatched <> None or despatched_qty <> None: |
| | 996 | name = "AUTO_WB_REF" + str(i) |
| | 997 | i = i+1 |
| | 998 | |
| | 999 | #InsertWB |
| | 1000 | wb_id = db.ot_waybill.insert(name = name, date_dispatched= date_dispatched, |
| | 1001 | supplier_id = supplier_id ) |
| | 1002 | db.executesql("UPDATE ot_temp SET waybill_id_" + index + " = " + str(wb_id) + " WHERE id = " + str(id) ) |
| | 1003 | |
| | 1004 | #add ot_line_items |
| | 1005 | db.ot_line_item.truncate() |
| | 1006 | |
| | 1007 | try: |
| | 1008 | db.executesql("ALTER TABLE ot_temp ADD wb_id_" + index + " integer") |
| | 1009 | except: |
| | 1010 | pass |
| | 1011 | |
| | 1012 | for id, \ |
| | 1013 | request_id, \ |
| | 1014 | line, \ |
| | 1015 | status, \ |
| | 1016 | order_id, \ |
| | 1017 | grn_id, \ |
| | 1018 | a_waybill_id, \ |
| | 1019 | b_waybill_id, \ |
| | 1020 | c_waybill_id, \ |
| | 1021 | invoice_id, \ |
| | 1022 | item_id, \ |
| | 1023 | quantity_pr, \ |
| | 1024 | quantity_po, \ |
| | 1025 | quantity_grn, \ |
| | 1026 | a_quantity_waybill, \ |
| | 1027 | b_quantity_waybill, \ |
| | 1028 | c_quantity_waybill, \ |
| | 1029 | unit_id, \ |
| | 1030 | budgeted_unit_price, \ |
| | 1031 | actual_unit_price, \ |
| | 1032 | actual_total_price, \ |
| | 1033 | comments \ |
| | 1034 | in db.executesql("SELECT id, " + |
| | 1035 | "request_id, " + |
| | 1036 | "line_item, " + |
| | 1037 | "status, " + |
| | 1038 | "order_id, " + |
| | 1039 | "grn_id, " + |
| | 1040 | "waybill_id_a, " + |
| | 1041 | "waybill_id_b, " + |
| | 1042 | "waybill_id_c, " + |
| | 1043 | "invoice_id, " + |
| | 1044 | "mat_item_id, " + |
| | 1045 | "quanity, " + |
| | 1046 | "qty, " + |
| | 1047 | "qty, " + |
| | 1048 | "a_despatched_qty, " + |
| | 1049 | "b_despatched_qty, " + |
| | 1050 | "c_despatched_qty, " + |
| | 1051 | "unit_mat_unit_id, " + |
| | 1052 | "budgeted_unit_price, " + |
| | 1053 | "actual_unit_price_in_currency, " + |
| | 1054 | "actual_total_price_in_currency, " + |
| | 1055 | "general_information_comments " + |
| | 1056 | "FROM ot_temp"): |
| | 1057 | |
| | 1058 | #convert status |
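| | | #reverse lookup: assuming ot_line_item_status_opts (defined in the model) maps id -> label, |
| | | #this turns the label read from the sheet back into its id |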
| | 1059 | status = find_key( ot_line_item_status_opts, status) |
| | 1060 | |
| | 1061 | #Calculate prices and currency |
| | 1062 | budgeted_unit_price, budgeted_currency_id = format_price(budgeted_unit_price) |
| | 1063 | actual_unit_price, actual_currency_id = format_price(actual_unit_price) |
| | 1064 | |
| | 1065 | line = validate_int(line) |
| | 1066 | quantity_pr = validate_int(quantity_pr) |
| | 1067 | quantity_po = validate_int(quantity_po) |
| | 1068 | quantity_grn = validate_int(quantity_grn) |
| | 1069 | a_quantity_waybill = validate_int(a_quantity_waybill) |
| | 1070 | b_quantity_waybill = validate_int(b_quantity_waybill) |
| | 1071 | c_quantity_waybill = validate_int(c_quantity_waybill) |
| | 1072 | |
| | 1073 | |
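| | | #a line item row is inserted for the purchase request stage first; where order, GRN or |
| | | #waybill data exists, the earlier row is flagged superseded and a fuller copy is inserted |
| | | #(the b_ and c_ waybills get additional rows of their own) |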
| | 1074 | line_item_id = db.ot_line_item.insert( request_id = request_id, |
| | 1075 | line = line , |
| | 1076 | status = status, |
| | 1077 | item_id = item_id, |
| | 1078 | quantity_pr = quantity_pr, |
| | 1079 | unit_id = unit_id, |
| | 1080 | budgeted_unit_price = budgeted_unit_price, |
| | 1081 | comments = comments) |
| | 1082 | |
| | 1083 | |
| | 1084 | if order_id <> None: |
| | 1085 | db(db.ot_line_item.id == line_item_id).update(superseded=True) |
| | 1086 | line_item_id = db.ot_line_item.insert( request_id = request_id, |
| | 1087 | line = line , |
| | 1088 | status = status, |
| | 1089 | order_id = order_id, |
| | 1090 | invoice_id = invoice_id, |
| | 1091 | item_id = item_id, |
| | 1092 | quantity_pr = quantity_pr, |
| | 1093 | quantity_po = quantity_po, |
| | 1094 | quantity_grn = quantity_grn, |
| | 1095 | quantity_waybill = a_quantity_waybill, |
| | 1096 | unit_id = unit_id, |
| | 1097 | budgeted_unit_price = budgeted_unit_price, |
| | 1098 | actual_unit_price = actual_unit_price, |
| | 1099 | actual_total_price = actual_total_price, |
| | 1100 | comments = comments) |
| | 1101 | |
| | 1102 | |
| | 1103 | if grn_id <> None: |
| | 1104 | db(db.ot_line_item.id == line_item_id).update(superseded=True) |
| | 1105 | line_item_id = db.ot_line_item.insert( request_id = request_id, |
| | 1106 | line = line , |
| | 1107 | status = status, |
| | 1108 | order_id = order_id, |
| | 1109 | grn_id = grn_id, |
| | 1110 | invoice_id = invoice_id, |
| | 1111 | item_id = item_id, |
| | 1112 | quantity_pr = quantity_pr, |
| | 1113 | quantity_po = quantity_po, |
| | 1114 | quantity_grn = quantity_grn, |
| | 1115 | quantity_waybill = a_quantity_waybill, |
| | 1116 | unit_id = unit_id, |
| | 1117 | budgeted_unit_price = budgeted_unit_price, |
| | 1118 | actual_unit_price = actual_unit_price, |
| | 1119 | actual_total_price = actual_total_price, |
| | 1120 | comments = comments) |
| | 1121 | |
| | 1122 | #if invoice_id <> None: |
| | 1123 | # db(db.ot_line_item.id == line_item_id).update(superseded=True) |
| | 1124 | # db.ot_line_item.insert( request_id = request_id, |
| | 1125 | # line = line , |
| | 1126 | # status = status, |
| | 1127 | # order_id = order_id, |
| | 1128 | # grn_id = grn_id, |
| | 1129 | # waybill_id = a_waybill_id, |
| | 1130 | # invoice_id = invoice_id, |
| | 1131 | # item_id = item_id, |
| | 1132 | # quantity_pr = quantity_pr, |
| | 1133 | # quantity_po = quantity_po, |
| | 1134 | # quantity_grn = quantity_grn, |
| | 1135 | # quantity_waybill = a_quantity_waybill, |
| | 1136 | # unit_id = unit_id, |
| | 1137 | # budgeted_unit_price = budgeted_unit_price, |
| | 1138 | # actual_unit_price = actual_unit_price, |
| | 1139 | # actual_total_price = actual_total_price, |
| | 1140 | # comments = comments) |
| | 1141 | |
| | 1142 | if a_waybill_id <> None: |
| | 1143 | db(db.ot_line_item.id == line_item_id).update(superseded=True) |
| | 1144 | db.ot_line_item.insert( request_id = request_id, |
| | 1145 | line = line , |
| | 1146 | status = status, |
| | 1147 | order_id = order_id, |
| | 1148 | grn_id = grn_id, |
| | 1149 | waybill_id = a_waybill_id, |
| | 1150 | invoice_id = invoice_id, |
| | 1151 | item_id = item_id, |
| | 1152 | quantity_pr = quantity_pr, |
| | 1153 | quantity_po = quantity_po, |
| | 1154 | quantity_grn = quantity_grn, |
| | 1155 | quantity_waybill = a_quantity_waybill, |
| | 1156 | unit_id = unit_id, |
| | 1157 | budgeted_unit_price = budgeted_unit_price, |
| | 1158 | actual_unit_price = actual_unit_price, |
| | 1159 | actual_total_price = actual_total_price, |
| | 1160 | comments = comments) |
| | 1161 | |
| | 1162 | if b_waybill_id <> None: |
| | 1163 | db.ot_line_item.insert( request_id = request_id, |
| | 1164 | line = line , |
| | 1165 | status = status, |
| | 1166 | order_id = order_id, |
| | 1167 | grn_id = grn_id, |
| | 1168 | waybill_id = b_waybill_id, |
| | 1169 | invoice_id = invoice_id, |
| | 1170 | item_id = item_id, |
| | 1171 | quantity_pr = quantity_pr, |
| | 1172 | quantity_po = quantity_po, |
| | 1173 | quantity_grn = quantity_grn, |
| | 1174 | quantity_waybill = b_quantity_waybill, |
| | 1175 | unit_id = unit_id, |
| | 1176 | budgeted_unit_price = budgeted_unit_price, |
| | 1177 | actual_unit_price = actual_unit_price, |
| | 1178 | actual_total_price = actual_total_price, |
| | 1179 | comments = comments) |
| | 1180 | |
| | 1181 | if c_waybill_id <> None: |
| | 1182 | db.ot_line_item.insert( request_id = request_id, |
| | 1183 | line = line , |
| | 1184 | status = status, |
| | 1185 | order_id = order_id, |
| | 1186 | grn_id = grn_id, |
| | 1187 | waybill_id = c_waybill_id, |
| | 1188 | invoice_id = invoice_id, |
| | 1189 | item_id = item_id, |
| | 1190 | quantity_pr = quantity_pr, |
| | 1191 | quantity_po = quantity_po, |
| | 1192 | quantity_grn = quantity_grn, |
| | 1193 | quantity_waybill = c_quantity_waybill, |
| | 1194 | unit_id = unit_id, |
| | 1195 | budgeted_unit_price = budgeted_unit_price, |
| | 1196 | actual_unit_price = actual_unit_price, |
| | 1197 | actual_total_price = actual_total_price, |
| | 1198 | comments = comments) |
| | 1199 | |
| | 1200 | return SQLTABLE(db(db.ot_line_item.id<>0).select()) |
| | 1201 | |
| | 1202 | def clean_excel_data(): |
| | 1203 | return dict(message="beginning import") |
| | 1204 | |
| | 1205 | def lookups_menu(): |
| | 1206 | resource = 'lookups' |
| | 1207 | response.title = LOOKUPS |
| | 1208 | s3.sub_options = [ |
| | 1209 | [T('Item Categories'), 'mat', 'category', T('')], |
| | 1210 | [T('Item Sub-Categories'), 'mat', 'sub_category', T('')], |
| | 1211 | [T('Units'), 'mat', 'unit', T('') ], |
| | 1212 | ] |
| | 1213 | s3.sub_options_menu = [] |
| | 1214 | for option in s3.sub_options: |
| | 1215 | label = option[0] |
| | 1216 | controller = option[1] |
| | 1217 | function = option[2] |
| | 1218 | s3.sub_options_menu.append([label, False, URL(request.application, controller, function)],) |
| | 1219 | response.menu.insert(2, ['>> ' + str(response.title), False, URL(request.application, module, resource), s3.sub_options_menu]) |
| | 1220 | return |
| | 1221 | }}} |