Package dbf :: Module ver_33
[hide private]
[frames] | [no frames]

Source Code for Module dbf.ver_33

   1  """ 
   2  ========= 
   3  Copyright 
   4  ========= 
   5      - Portions copyright: 2008-2012 Ad-Mail, Inc -- All rights reserved. 
   6      - Portions copyright: 2012-2013 Ethan Furman -- All rights reserved. 
   7      - Author: Ethan Furman 
   8      - Contact: ethan@stoneleaf.us 
   9   
  10  Redistribution and use in source and binary forms, with or without 
  11  modification, are permitted provided that the following conditions are met: 
  12      - Redistributions of source code must retain the above copyright 
  13        notice, this list of conditions and the following disclaimer. 
  14      - Redistributions in binary form must reproduce the above copyright 
  15        notice, this list of conditions and the following disclaimer in the 
  16        documentation and/or other materials provided with the distribution. 
  17      - Neither the name of Ad-Mail, Inc nor the 
  18        names of its contributors may be used to endorse or promote products 
  19        derived from this software without specific prior written permission. 
  20   
  21  THIS SOFTWARE IS PROVIDED ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, 
  22  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY 
  23  AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL 
  24  ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
  25  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
  26  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
  27  OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
  28  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
  29  OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
  30  ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  31  """ 
  32   
  33  import codecs 
  34  import collections 
  35  import csv 
  36  import datetime 
  37  import os 
  38  import struct 
  39  import sys 
  40  import time 
  41  import weakref 
  42   
  43  from array import array 
  44  from bisect import bisect_left, bisect_right 
  45  from collections import defaultdict 
  46  from decimal import Decimal 
  47  from enum import Enum 
  48  from glob import glob 
  49  from math import floor 
  50  from os import SEEK_SET, SEEK_CUR, SEEK_END 
  51  import types 
  52   
# Module-level namespace; the various Enum.export_to(module) calls below
# inject enum members into it so they are usable as bare module globals.
module = globals()

NoneType = type(None)

# Flag for behavior if bad data is encountered in a logical field
# Return None if True, else raise BadDataError
LOGICAL_BAD_IS_NONE = True

# treat non-unicode data as ...
input_decoding = 'ascii'

# if no codepage specified on dbf creation, use this
default_codepage = 'ascii'

# default format if none specified
default_type = 'db3'

# temp-file directory: first of DBF_TEMP, TMP, TEMP that is set, else ""
temp_dir = os.environ.get("DBF_TEMP") or os.environ.get("TMP") or os.environ.get("TEMP") or ""

# signature:_meta of template records
_Template_Records = dict()

# days per month, padded with the previous December (index 0) and the
# following January (index 13) so month arithmetic can index one past
# either end while normalizing dates
# dec jan feb mar apr may jun jul aug sep oct nov dec jan
days_per_month = [31, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31]
days_per_leap_month = [31, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31]
class Enum(Enum):
    """
    Shadows enum.Enum, adding an 'export_to()' classmethod.
    """

    @classmethod
    def export_to(cls, namespace):
        # inject every member into the target mapping (typically a module's
        # globals()) so members can be referenced as bare names
        namespace.update(cls.__members__)
class IntEnum(int, Enum):
    """int-valued Enum that also has export_to()."""
    pass
class HexEnum(IntEnum):
    "repr is in hex"

    def __repr__(self):
        # e.g. <LatinByte.NULL: 0x00>
        return '<%s.%s: %#02x>' % (
                self.__class__.__name__,
                self._name_,
                self._value_,
                )
class ValueAliasEnum(Enum):
    "allows value aliases (not name aliases)"

    def __new__(cls, int_value, *value_aliases):
        obj = object.__new__(cls)
        obj._value_ = int_value
        # register each extra value directly in the value->member map so
        # Class(alias) resolves to this member as well
        for alias in value_aliases:
            cls._value2member_map_[alias] = obj
        return obj
class AutoEnum(IntEnum):
    """
    Automatically numbers enum members starting from __number__ (defaults to 0).

    Includes support for a custom docstring per member.
    """

    __number__ = 0  # class-level counter of the next value to assign

    def __new__(cls, *args):
        """Ignores arguments (they are handled in __init__)."""
        value = cls.__number__
        cls.__number__ += 1
        obj = int.__new__(cls, value)
        obj._value_ = value
        return obj

    def __init__(self, *args):
        """Can handle 0 or 1 argument; more requires a custom __init__.

        0  = auto-number w/o docstring
        1  = auto-number w/ docstring
        2+ = needs custom __init__

        """
        if len(args) == 1 and isinstance(args[0], str):
            # the single string argument becomes the member's docstring
            self.__doc__ = args[0]
        elif args:
            raise TypeError('%s not dealt with -- need custom __init__' % (args,))
class IsoDay(IntEnum):
    """ISO week days: MONDAY=1 .. SUNDAY=7."""
    MONDAY = 1
    TUESDAY = 2
    WEDNESDAY = 3
    THURSDAY = 4
    FRIDAY = 5
    SATURDAY = 6
    SUNDAY = 7

    def next_delta(self, day):
        """Return number of days needed to get from self forward to day (1..7)."""
        if self == day:
            # same day means a full week ahead, not zero
            return 7
        delta = day - self
        if delta < 0:
            delta += 7
        return delta

    def last_delta(self, day):
        """Return number of days needed to get from self back to day (-1..-7)."""
        if self == day:
            # same day means a full week back, not zero
            return -7
        delta = day - self
        if delta > 0:
            delta -= 7
        return delta
class RelativeDay(Enum):
    """
    Symbolic week-day targets (LAST_/NEXT_ + day name) used by
    Date.replace() and DateTime.replace().
    """
    LAST_SUNDAY = ()
    LAST_SATURDAY = ()
    LAST_FRIDAY = ()
    LAST_THURSDAY = ()
    LAST_WEDNESDAY = ()
    LAST_TUESDAY = ()
    LAST_MONDAY = ()
    NEXT_MONDAY = ()
    NEXT_TUESDAY = ()
    NEXT_WEDNESDAY = ()
    NEXT_THURSDAY = ()
    NEXT_FRIDAY = ()
    NEXT_SATURDAY = ()
    NEXT_SUNDAY = ()

    def __new__(cls):
        result = object.__new__(cls)
        # BUG FIX: was `result._value` (a stray attribute the enum machinery
        # never sees); the member value must be stored in `_value_`.
        # Members are numbered 1..14 in definition order.
        result._value_ = len(cls.__members__) + 1
        return result

    def days_from(self, day):
        """Return the signed day offset from IsoDay `day` to this target."""
        # member names are LAST_<DAY> or NEXT_<DAY>; [5:] strips the prefix
        target = IsoDay[self.name[5:]]
        if self.name[:4] == 'LAST':
            return day.last_delta(target)
        return day.next_delta(target)

RelativeDay.export_to(module)
class IsoMonth(IntEnum):
    """Calendar months: JANUARY=1 .. DECEMBER=12."""
    JANUARY = 1
    FEBRUARY = 2
    MARCH = 3
    APRIL = 4
    MAY = 5
    JUNE = 6
    JULY = 7
    AUGUST = 8
    SEPTEMBER = 9
    OCTOBER = 10
    NOVEMBER = 11
    DECEMBER = 12

    def next_delta(self, month):
        """Return number of months needed to get from self forward to month (1..12)."""
        if self == month:
            # same month means a full year ahead, not zero
            return 12
        delta = month - self
        if delta < 0:
            delta += 12
        return delta

    def last_delta(self, month):
        """Return number of months needed to get from self back to month (-1..-12)."""
        if self == month:
            # same month means a full year back, not zero
            return -12
        delta = month - self
        if delta > 0:
            delta -= 12
        return delta
class RelativeMonth(Enum):
    """
    Symbolic month targets (LAST_/NEXT_ + month name) used by
    Date.replace() and DateTime.replace().
    """
    LAST_DECEMBER = ()
    LAST_NOVEMBER = ()
    LAST_OCTOBER = ()
    LAST_SEPTEMBER = ()
    LAST_AUGUST = ()
    LAST_JULY = ()
    LAST_JUNE = ()
    LAST_MAY = ()
    LAST_APRIL = ()
    LAST_MARCH = ()
    LAST_FEBRUARY = ()
    LAST_JANUARY = ()
    NEXT_JANUARY = ()
    NEXT_FEBRUARY = ()
    NEXT_MARCH = ()
    NEXT_APRIL = ()
    NEXT_MAY = ()
    NEXT_JUNE = ()
    NEXT_JULY = ()
    NEXT_AUGUST = ()
    NEXT_SEPTEMBER = ()
    NEXT_OCTOBER = ()
    NEXT_NOVEMBER = ()
    NEXT_DECEMBER = ()

    def __new__(cls):
        result = object.__new__(cls)
        # BUG FIX: was `result._value` (a stray attribute); the member
        # value must be stored in `_value_`.  Members are numbered 1..24
        # in definition order.
        result._value_ = len(cls.__members__) + 1
        return result

    def months_from(self, month):
        """Return the signed month offset from IsoMonth `month` to this target."""
        # member names are LAST_<MONTH> or NEXT_<MONTH>; [5:] strips the prefix
        target = IsoMonth[self.name[5:]]
        if self.name[:4] == 'LAST':
            return month.last_delta(target)
        return month.next_delta(target)

RelativeMonth.export_to(module)
def is_leapyear(year):
    """
    Gregorian leap-year test: divisible by 4, except century years,
    which must be divisible by 400.
    """
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
# Constants

class LatinByte(HexEnum):
    """Byte values of the control characters used in dbf files."""
    NULL = 0x00
    LF = 0x0a
    CR = 0x0d
    EOF = 0x1a
    ESC = 0x1b
    SPACE = 0x20
    ASTERISK = 0x2a

LatinByte.export_to(module)
class FieldType(IntEnum):
    """
    dbf field types; each member's value is the ordinal of its type
    character, and both cases of the character (as str and as ascii
    bytes) are registered as value aliases, so FieldType('c'),
    FieldType(b'C'), etc. all resolve to the same member.
    """

    def __new__(cls, char):
        int_value = ord(char)
        obj = int.__new__(cls, int_value)
        obj._value_ = int_value
        # canonical (upper-case) type character for this field type
        obj.symbol = char.upper()
        for alias in (
                char.lower(),
                char.upper(),
                ):
            cls._value2member_map_[alias] = obj
            cls._value2member_map_[alias.encode('ascii')] = obj
        return obj

    def __repr__(self):
        # e.g. <FieldType.CHAR: b'C'>
        return '<%s.%s: %r>' % (
                self.__class__.__name__,
                self._name_,
                bytes([self._value_]),
                )

    _NULLFLAG = '0'
    CHAR = 'C'
    CURRENCY = 'Y'
    DATE = 'D'
    DATETIME = 'T'
    DOUBLE = 'B'
    FLOAT = 'F'
    GENERAL = 'G'
    INTEGER = 'I'
    LOGICAL = 'L'
    MEMO = 'M'
    NUMERIC = 'N'
    PICTURE = 'P'

FieldType.export_to(module)
class FieldFlag(IntEnum):
    """Bit flags attached to a dbf field definition."""

    @classmethod
    def lookup(cls, alias):
        """Map a field-flag spelling ('null', 'binary', ...) to its member."""
        alias = alias.lower()
        if alias in ('system', ):
            return cls.SYSTEM
        elif alias in ('null', 'nullable'):
            return cls.NULLABLE
        elif alias in ('binary', 'nocptrans'):
            return cls.BINARY
        else:
            raise ValueError('no FieldFlag %r' % alias)

    @property
    def text(self):
        """Lower-case spelling of the flag ('null' for NULLABLE)."""
        # NOTE(review): bare NULLABLE relies on FieldFlag.export_to(module)
        # below having injected the members into module globals -- confirm
        # before relocating this class.
        if self is NULLABLE:
            return 'null'
        else:
            return self._name_.lower()

    SYSTEM = 0x01
    NULLABLE = 0x02
    BINARY = 0x04
    NOCPTRANS = 0x04  # alias of BINARY
    #AUTOINC = 0x0c       # not currently supported (not vfp 6)

FieldFlag.export_to(module)
class Field(AutoEnum):
    """
    Aspects of a field definition; each member's string becomes its
    docstring via AutoEnum.__init__.
    """
    __order__ = 'TYPE START LENGTH END DECIMALS FLAGS CLASS EMPTY NUL'
    TYPE = "Char, Date, Logical, etc."
    START = "Field offset in record"
    LENGTH = "Length of field in record"
    END = "End of field in record (exclusive)"
    DECIMALS = "Number of decimal places if numeric"
    FLAGS = "System, Binary, Nullable"
    CLASS = "python class type"
    EMPTY = "python function for empty field"
    NUL = "python function for null field"

Field.export_to(module)
class DbfLocation(AutoEnum):
    """Where a table's data lives."""
    __order__ = 'IN_MEMORY ON_DISK'
    IN_MEMORY = "dbf is kept in memory (disappears at program end)"
    ON_DISK = "dbf is kept on disk"

DbfLocation.export_to(module)
class DbfStatus(AutoEnum):
    """Open/closed state of a table."""
    __order__ = 'CLOSED READ_ONLY READ_WRITE'
    CLOSED = 'closed (only meta information available)'
    READ_ONLY = 'read-only'
    READ_WRITE = 'read-write'

DbfStatus.export_to(module)
class LazyAttr:
    """
    Descriptor that defers work: the wrapped function is not run until
    the attribute is actually read on an instance.
    """

    def __init__(self, func=None, doc=None):
        self.fget = func
        self.__doc__ = doc or func.__doc__

    def __call__(self, func):
        # NOTE(review): returns None, so the decorator-with-arguments form
        # `@LazyAttr(doc=...)` would bind None -- use the bare form.
        self.fget = func

    def __get__(self, instance, owner):
        # class-level access returns the descriptor itself
        if instance is None:
            return self
        return self.fget(instance)
386 387 -class MutableDefault:
388 """ 389 Lives in the class, and on first access calls the supplied factory and 390 maps the result into the instance it was called on 391 """ 392
393 - def __init__(self, func):
394 self._name = func.__name__ 395 self.func = func
396
397 - def __call__(self):
398 return self
399
400 - def __get__(self, instance, owner):
401 result = self.func() 402 if instance is not None: 403 setattr(instance, self._name, result) 404 return result
405
406 - def __repr__(self):
407 result = self.func() 408 return "MutableDefault(%r)" % (result, )
409
def none(*args, **kwargs):
    """
    Stand-in constructor for NoneType (which cannot be called with
    arguments): accepts anything, always returns None.
    """
    return None
# warnings and errors

class DbfError(Exception):
    """
    Root of the dbf exception hierarchy; fatal errors elicit this response.
    """

    def __init__(self, message, *args):
        super().__init__(message, *args)
        # keep the primary message easily reachable
        self.message = message
class DataOverflowError(DbfError):
    """
    Raised when data is too large for its field; the offending data
    is kept in .data.
    """

    def __init__(self, message, data=None):
        super().__init__(message)
        self.data = data
class BadDataError(DbfError):
    """
    Raised when bad data is encountered in a table; the offending data
    is kept in .data.
    """

    def __init__(self, message, data=None):
        super().__init__(message)
        self.data = data
class FieldMissingError(KeyError, DbfError):
    """
    Raised when a requested field does not exist in the table; the
    field name is kept in .data.
    """

    def __init__(self, fieldname):
        msg = '%s: no such field in table' % fieldname
        KeyError.__init__(self, msg)
        DbfError.__init__(self, msg)
        self.data = fieldname
class FieldSpecError(DbfError, ValueError):
    """
    Raised for an invalid field specification.
    """

    def __init__(self, message):
        for base in (ValueError, DbfError):
            base.__init__(self, message)
class NonUnicodeError(DbfError):
    """
    Raised when data for the table is not unicode.
    """

    def __init__(self, message=None):
        # BUG FIX: the __init__ body was missing entirely (an empty def is
        # a syntax error); restore the base-class initialization.
        DbfError.__init__(self, message)
class NotFoundError(DbfError, ValueError, KeyError, IndexError):
    """
    Raised when record criteria are not met; catchable as ValueError,
    KeyError, or IndexError so it works with any lookup style.
    """

    def __init__(self, message=None, data=None):
        for base in (ValueError, KeyError, IndexError, DbfError):
            base.__init__(self, message)
        self.data = data
class DbfWarning(Exception):
    """
    Root of the dbf warning hierarchy; normal operations elicit this response.
    """
class Eof(DbfWarning, StopIteration):
    """
    Signals that the end of file was reached; doubles as StopIteration
    so iteration protocols handle it naturally.
    """

    message = 'End of file reached'

    def __init__(self):
        for base in (StopIteration, DbfWarning):
            base.__init__(self, self.message)
class Bof(DbfWarning, StopIteration):
    """
    Signals that the beginning of file was reached; doubles as
    StopIteration so iteration protocols handle it naturally.
    """

    message = 'Beginning of file reached'

    def __init__(self):
        for base in (StopIteration, DbfWarning):
            base.__init__(self, self.message)
class DoNotIndex(DbfWarning):
    """
    Returned by indexing functions to suppress a record from becoming
    part of the index.
    """

    message = 'Not indexing record'

    def __init__(self):
        super().__init__(self.message)
# wrappers around datetime and logical objects to allow null values

# placeholder sentinels; gets replaced later by their final values
Unknown = Other = object()
class NullType:
    """
    Null object -- any interaction returns Null; falsey, unhashable,
    prints as empty string, reprs as <null>.
    """

    def _null(self, *args, **kwargs):
        # the single behavior shared by (nearly) every operation
        return self

    __eq__ = __ne__ = __ge__ = __gt__ = __le__ = __lt__ = _null
    __add__ = __iadd__ = __radd__ = _null
    __sub__ = __isub__ = __rsub__ = _null
    __mul__ = __imul__ = __rmul__ = _null
    __div__ = __idiv__ = __rdiv__ = _null
    __mod__ = __imod__ = __rmod__ = _null
    __pow__ = __ipow__ = __rpow__ = _null
    __and__ = __iand__ = __rand__ = _null
    __xor__ = __ixor__ = __rxor__ = _null
    __or__ = __ior__ = __ror__ = _null
    __truediv__ = __itruediv__ = __rtruediv__ = _null
    __floordiv__ = __ifloordiv__ = __rfloordiv__ = _null
    __lshift__ = __ilshift__ = __rlshift__ = _null
    __rshift__ = __irshift__ = __rrshift__ = _null
    __neg__ = __pos__ = __abs__ = __invert__ = _null
    __call__ = __getattr__ = _null

    def __divmod__(self, other):
        # divmod must return a pair
        return self, self
    __rdivmod__ = __divmod__

    def __hash__(self):
        raise TypeError("unhashable type: 'Null'")

    def __new__(cls, *args):
        # NullType() always hands back the one singleton
        return cls.null

    def __bool__(self):
        return False

    def __repr__(self):
        return '<null>'

    def __setattr__(self, name, value):
        # attribute assignment is silently ignored
        return None

    def __setitem__(self, index, value):
        # BUG FIX: was misspelled `__setitem___` (three trailing
        # underscores), so `Null[x] = y` raised TypeError instead of
        # being silently ignored like __setattr__.
        return None

    def __str__(self):
        return ''

# create the singleton directly (object.__new__ bypasses NullType.__new__,
# which needs `null` to already exist)
NullType.null = object.__new__(NullType)
Null = NullType()
class Vapor:
    """
    used in Vapor Records -- compares unequal with everything
    """

    def __eq__(self, other):
        # never equal to anything, not even itself
        return False

    def __ne__(self, other):
        return True

# replace the class with its singleton instance; only the instance is
# used from here on
Vapor = Vapor()
class Char(str):
    """
    Strips trailing whitespace, and ignores trailing whitespace for comparisons
    """

    def __new__(cls, text=''):
        if not isinstance(text, (basestring, cls)):
            raise ValueError("Unable to automatically coerce %r to Char" % text)
        result = str.__new__(cls, text.rstrip())
        # remember the unstripped length (includes trailing whitespace)
        result.field_size = len(text)
        return result

    # defining __eq__ would otherwise clear the inherited hash
    __hash__ = str.__hash__

    def __eq__(self, other):
        """
        ignores trailing whitespace
        """
        if not isinstance(other, (self.__class__, basestring)):
            return NotImplemented
        return str(self) == other.rstrip()

    def __ge__(self, other):
        """
        ignores trailing whitespace
        """
        if not isinstance(other, (self.__class__, basestring)):
            return NotImplemented
        return str(self) >= other.rstrip()

    def __gt__(self, other):
        """
        ignores trailing whitespace
        """
        if not isinstance(other, (self.__class__, basestring)):
            return NotImplemented
        return str(self) > other.rstrip()

    def __le__(self, other):
        """
        ignores trailing whitespace
        """
        if not isinstance(other, (self.__class__, basestring)):
            return NotImplemented
        return str(self) <= other.rstrip()

    def __lt__(self, other):
        """
        ignores trailing whitespace
        """
        if not isinstance(other, (self.__class__, basestring)):
            return NotImplemented
        return str(self) < other.rstrip()

    def __ne__(self, other):
        """
        ignores trailing whitespace
        """
        if not isinstance(other, (self.__class__, basestring)):
            return NotImplemented
        return str(self) != other.rstrip()

    def __bool__(self):
        """
        ignores trailing whitespace
        """
        return bool(str(self))

    def __add__(self, other):
        result = self.__class__(str(self) + other)
        # concatenation keeps the left operand's declared field size
        result.field_size = self.field_size
        return result

# py2-style convenience aliases used throughout the module
baseinteger = int
basestring = str, Char
class Date:
    """
    adds null capable datetime.date constructs
    """

    __slots__ = ['_date']  # a datetime.date, or None for the null date

    def __new__(cls, year=None, month=0, day=0):
        """
        date should be either a datetime.date or date/month/day should
        all be appropriate integers
        """
        if year is None or year is Null:
            return cls._null_date
        nd = object.__new__(cls)
        if isinstance(year, basestring):
            return Date.strptime(year)
        elif isinstance(year, (datetime.date)):
            nd._date = year
        elif isinstance(year, (Date)):
            nd._date = year._date
        else:
            nd._date = datetime.date(year, month, day)
        return nd

    def __add__(self, other):
        # Date + timedelta -> Date; the null date supports no arithmetic
        if self and isinstance(other, (datetime.timedelta)):
            return Date(self._date + other)
        else:
            return NotImplemented

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._date == other._date
        if isinstance(other, datetime.date):
            return self._date == other
        if isinstance(other, type(None)):
            # the null date compares equal to None
            return self._date is None
        return NotImplemented

    def __format__(self, spec):
        if self:
            return self._date.__format__(spec)
        return ''

    def __getattr__(self, name):
        # delegate unknown attributes to the wrapped datetime.date
        if name == '_date':
            raise AttributeError('_date missing!')
        elif self:
            return getattr(self._date, name)
        else:
            raise AttributeError('NullDate object has no attribute %s' % name)

    def __ge__(self, other):
        # NOTE(review): unlike __le__/__lt__/__ne__, there is no null-self
        # branch here; comparing a null Date against a datetime.date raises
        # TypeError (None >= date) -- confirm whether that is intended.
        if isinstance(other, (datetime.date)):
            return self._date >= other
        elif isinstance(other, (Date)):
            if other:
                return self._date >= other._date
            return False
        return NotImplemented

    def __gt__(self, other):
        # NOTE(review): same missing null-self handling as __ge__.
        if isinstance(other, (datetime.date)):
            return self._date > other
        elif isinstance(other, (Date)):
            if other:
                return self._date > other._date
            return True
        return NotImplemented

    def __hash__(self):
        return hash(self._date)

    def __le__(self, other):
        if self:
            if isinstance(other, (datetime.date)):
                return self._date <= other
            elif isinstance(other, (Date)):
                if other:
                    return self._date <= other._date
                return False
        else:
            # the null date sorts before everything, including itself
            if isinstance(other, (datetime.date)):
                return True
            elif isinstance(other, (Date)):
                if other:
                    return True
                return True
        return NotImplemented

    def __lt__(self, other):
        if self:
            if isinstance(other, (datetime.date)):
                return self._date < other
            elif isinstance(other, (Date)):
                if other:
                    return self._date < other._date
                return False
        else:
            # null < real date, but null is not < null
            if isinstance(other, (datetime.date)):
                return True
            elif isinstance(other, (Date)):
                if other:
                    return True
                return False
        return NotImplemented

    def __ne__(self, other):
        if self:
            if isinstance(other, (datetime.date)):
                return self._date != other
            elif isinstance(other, (Date)):
                if other:
                    return self._date != other._date
                return True
        else:
            if isinstance(other, (datetime.date)):
                return True
            elif isinstance(other, (Date)):
                if other:
                    return True
                return False
        return NotImplemented

    def __bool__(self):
        # the null date is falsey
        return self._date is not None

    __radd__ = __add__

    def __rsub__(self, other):
        if self and isinstance(other, (datetime.date)):
            return other - self._date
        elif self and isinstance(other, (Date)):
            return other._date - self._date
        elif self and isinstance(other, (datetime.timedelta)):
            return Date(other - self._date)
        else:
            return NotImplemented

    def __repr__(self):
        if self:
            return "Date(%d, %d, %d)" % self.timetuple()[:3]
        else:
            return "Date()"

    def __str__(self):
        if self:
            return str(self._date)
        return ""

    def __sub__(self, other):
        if self and isinstance(other, (datetime.date)):
            return self._date - other
        elif self and isinstance(other, (Date)):
            return self._date - other._date
        elif self and isinstance(other, (datetime.timedelta)):
            return Date(self._date - other)
        else:
            return NotImplemented

    def date(self):
        """Return the wrapped datetime.date, or None for the null date."""
        if self:
            return self._date
        return None

    @classmethod
    def fromordinal(cls, number):
        # ordinal 0 (or any falsey value) yields the null date
        if number:
            return cls(datetime.date.fromordinal(number))
        return cls()

    @classmethod
    def fromtimestamp(cls, timestamp):
        return cls(datetime.date.fromtimestamp(timestamp))

    @classmethod
    def fromymd(cls, yyyymmdd):
        """Build a Date from a 'YYYYMMDD' string; blank strings yield the null date."""
        # eight spaces: a blank dbf date field
        if yyyymmdd in ('', '        ', 'no date'):
            return cls()
        return cls(datetime.date(int(yyyymmdd[:4]), int(yyyymmdd[4:6]), int(yyyymmdd[6:])))

    def replace(self, year=None, month=None, day=None, delta_year=0, delta_month=0, delta_day=0):
        """
        Return a new Date with fields replaced and/or deltas applied;
        month/day may also be RelativeMonth/RelativeDay members.
        """
        if not self:
            return self.__class__._null_date
        old_year, old_month, old_day = self.timetuple()[:3]
        if isinstance(month, RelativeMonth):
            this_month = IsoMonth(old_month)
            delta_month += month.months_from(this_month)
            month = None
        if isinstance(day, RelativeDay):
            this_day = IsoDay(self.isoweekday())
            delta_day += day.days_from(this_day)
            day = None
        year = (year or old_year) + delta_year
        month = (month or old_month) + delta_month
        day = (day or old_day) + delta_day
        days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
        # normalize out-of-range month/day by rolling across month/year
        # boundaries until both are in range
        while not(0 < month < 13) or not (0 < day <= days_in_month[month]):
            while month < 1:
                year -= 1
                month = 12 + month
            while month > 12:
                year += 1
                month = month - 12
            days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
            while day < 1:
                month -= 1
                day = days_in_month[month] + day
                if not 0 < month < 13:
                    break
            while day > days_in_month[month]:
                day = day - days_in_month[month]
                month += 1
                if not 0 < month < 13:
                    break
        return Date(year, month, day)

    def strftime(self, format):
        if self:
            return self._date.strftime(format)
        return ''

    @classmethod
    def strptime(cls, date_string, format=None):
        # default format is ISO: YYYY-MM-DD
        if format is not None:
            return cls(*(time.strptime(date_string, format)[0:3]))
        return cls(*(time.strptime(date_string, "%Y-%m-%d")[0:3]))

    @classmethod
    def today(cls):
        return cls(datetime.date.today())

    def ymd(self):
        """Return the date as 'YYYYMMDD', or eight spaces for the null date."""
        if self:
            return "%04d%02d%02d" % self.timetuple()[:3]
        else:
            return '        '

Date.max = Date(datetime.date.max)
Date.min = Date(datetime.date.min)
# build the null-date singleton directly (object.__new__ bypasses
# Date.__new__, which would need _null_date to already exist)
Date._null_date = object.__new__(Date)
Date._null_date._date = None
NullDate = Date()
class DateTime:
    """
    adds null capable datetime.datetime constructs
    """

    __slots__ = ['_datetime']  # a datetime.datetime, or None for the null value

    def __new__(cls, year=None, month=0, day=0, hour=0, minute=0, second=0, microsecond=0):
        """year may be a datetime.datetime"""
        if year is None or year is Null:
            return cls._null_datetime
        ndt = object.__new__(cls)
        if isinstance(year, basestring):
            return DateTime.strptime(year)
        elif isinstance(year, DateTime):
            ndt._datetime = year._datetime
        elif isinstance(year, datetime.datetime):
            # microseconds are kept only to millisecond resolution
            microsecond = year.microsecond // 1000 * 1000
            hour, minute, second = year.hour, year.minute, year.second
            year, month, day = year.year, year.month, year.day
            ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond)
        elif year is not None:
            microsecond = microsecond // 1000 * 1000
            ndt._datetime = datetime.datetime(year, month, day, hour, minute, second, microsecond)
        return ndt

    def __add__(self, other):
        # DateTime + timedelta -> DateTime; the null value supports no arithmetic
        if self and isinstance(other, (datetime.timedelta)):
            return DateTime(self._datetime + other)
        else:
            return NotImplemented

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._datetime == other._datetime
        if isinstance(other, datetime.date):
            return self._datetime == other
        if isinstance(other, type(None)):
            # the null value compares equal to None
            return self._datetime is None
        return NotImplemented

    def __format__(self, spec):
        if self:
            return self._datetime.__format__(spec)
        return ''

    def __getattr__(self, name):
        # delegate unknown attributes to the wrapped datetime.datetime
        if name == '_datetime':
            raise AttributeError('_datetime missing!')
        elif self:
            return getattr(self._datetime, name)
        else:
            raise AttributeError('NullDateTime object has no attribute %s' % name)

    def __ge__(self, other):
        if self:
            if isinstance(other, (datetime.datetime)):
                return self._datetime >= other
            elif isinstance(other, (DateTime)):
                if other:
                    return self._datetime >= other._datetime
                return False
        else:
            # null >= only another null
            if isinstance(other, (datetime.datetime)):
                return False
            elif isinstance(other, (DateTime)):
                if other:
                    return False
            return True
        return NotImplemented

    def __gt__(self, other):
        if self:
            if isinstance(other, (datetime.datetime)):
                return self._datetime > other
            elif isinstance(other, (DateTime)):
                if other:
                    return self._datetime > other._datetime
                return True
        else:
            # null is never greater than anything
            if isinstance(other, (datetime.datetime)):
                return False
            elif isinstance(other, (DateTime)):
                if other:
                    return False
            return False
        return NotImplemented

    def __hash__(self):
        return self._datetime.__hash__()

    def __le__(self, other):
        if self:
            if isinstance(other, (datetime.datetime)):
                return self._datetime <= other
            elif isinstance(other, (DateTime)):
                if other:
                    return self._datetime <= other._datetime
                return False
        else:
            # the null value sorts before everything, including itself
            if isinstance(other, (datetime.datetime)):
                return True
            elif isinstance(other, (DateTime)):
                if other:
                    return True
            return True
        return NotImplemented

    def __lt__(self, other):
        if self:
            if isinstance(other, (datetime.datetime)):
                return self._datetime < other
            elif isinstance(other, (DateTime)):
                if other:
                    return self._datetime < other._datetime
                return False
        else:
            # null < real value, but null is not < null
            if isinstance(other, (datetime.datetime)):
                return True
            elif isinstance(other, (DateTime)):
                if other:
                    return True
            return False
        return NotImplemented

    def __ne__(self, other):
        if self:
            if isinstance(other, (datetime.datetime)):
                return self._datetime != other
            elif isinstance(other, (DateTime)):
                if other:
                    return self._datetime != other._datetime
                return True
        else:
            if isinstance(other, (datetime.datetime)):
                return True
            elif isinstance(other, (DateTime)):
                if other:
                    return True
            return False
        return NotImplemented

    def __bool__(self):
        # the null value is falsey
        return self._datetime is not None

    __radd__ = __add__

    def __rsub__(self, other):
        if self and isinstance(other, (datetime.datetime)):
            return other - self._datetime
        elif self and isinstance(other, (DateTime)):
            return other._datetime - self._datetime
        elif self and isinstance(other, (datetime.timedelta)):
            return DateTime(other - self._datetime)
        else:
            return NotImplemented

    def __repr__(self):
        if self:
            return "DateTime(%5d, %2d, %2d, %2d, %2d, %2d, %2d)" % (
                self._datetime.timetuple()[:6] + (self._datetime.microsecond, )
                )
        else:
            return "DateTime()"

    def __str__(self):
        if self:
            return str(self._datetime)
        return ""

    def __sub__(self, other):
        if self and isinstance(other, (datetime.datetime)):
            return self._datetime - other
        elif self and isinstance(other, (DateTime)):
            return self._datetime - other._datetime
        elif self and isinstance(other, (datetime.timedelta)):
            return DateTime(self._datetime - other)
        else:
            return NotImplemented

    @classmethod
    def combine(cls, date, time):
        """Combine a Date and a Time into a DateTime; either null yields null."""
        if Date(date) and Time(time):
            return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond)
        return cls()

    def date(self):
        """Return the date portion as a Date (null DateTime -> null Date)."""
        if self:
            return Date(self.year, self.month, self.day)
        return Date()

    def datetime(self):
        """Return the wrapped datetime.datetime, or None for the null value."""
        if self:
            return self._datetime
        return None

    @classmethod
    def fromordinal(cls, number):
        # ordinal 0 (or any falsey value) yields the null value
        if number:
            return cls(datetime.datetime.fromordinal(number))
        else:
            return cls()

    @classmethod
    def fromtimestamp(cls, timestamp):
        return DateTime(datetime.datetime.fromtimestamp(timestamp))

    @classmethod
    def now(cls):
        return cls(datetime.datetime.now())

    def replace(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None,
              delta_year=0, delta_month=0, delta_day=0, delta_hour=0, delta_minute=0, delta_second=0):
        """
        Return a new DateTime with fields replaced and/or deltas applied;
        month/day may also be RelativeMonth/RelativeDay members.
        """
        if not self:
            return self.__class__._null_datetime
        # NOTE(review): timetuple()[:7] ends with tm_wday, so old_micro is
        # the ISO weekday (0-6), NOT the stored microsecond -- when the
        # microsecond argument is omitted the result's microsecond becomes
        # the weekday.  Looks like a bug; confirm intent.
        old_year, old_month, old_day, old_hour, old_minute, old_second, old_micro = self.timetuple()[:7]
        if isinstance(month, RelativeMonth):
            this_month = IsoMonth(old_month)
            delta_month += month.months_from(this_month)
            month = None
        if isinstance(day, RelativeDay):
            this_day = IsoDay(self.isoweekday())
            delta_day += day.days_from(this_day)
            day = None
        year = (year or old_year) + delta_year
        month = (month or old_month) + delta_month
        day = (day or old_day) + delta_day
        hour = (hour or old_hour) + delta_hour
        minute = (minute or old_minute) + delta_minute
        second = (second or old_second) + delta_second
        microsecond = microsecond or old_micro
        days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
        # normalize all out-of-range fields by rolling across their
        # respective boundaries until everything is in range
        while ( not (0 < month < 13)
                or not (0 < day <= days_in_month[month])
                or not (0 <= hour < 24)
                or not (0 <= minute < 60)
                or not (0 <= second < 60)
                ):
            while month < 1:
                year -= 1
                month = 12 + month
            while month > 12:
                year += 1
                month = month - 12
            days_in_month = (days_per_month, days_per_leap_month)[is_leapyear(year)]
            while day < 1:
                month -= 1
                day = days_in_month[month] + day
                if not 0 < month < 13:
                    break
            while day > days_in_month[month]:
                day = day - days_in_month[month]
                month += 1
                if not 0 < month < 13:
                    break
            while hour < 1:
                day -= 1
                hour = 24 + hour
            while hour > 23:
                day += 1
                hour = hour - 24
            while minute < 0:
                hour -= 1
                minute = 60 + minute
            while minute > 59:
                hour += 1
                minute = minute - 60
            while second < 0:
                minute -= 1
                second = 60 + second
            while second > 59:
                minute += 1
                second = second - 60
        return DateTime(year, month, day, hour, minute, second, microsecond)

    def strftime(self, format):
        if self:
            return self._datetime.strftime(format)
        return ''

    @classmethod
    def strptime(cls, datetime_string, format=None):
        # default formats are ISO, with and without fractional seconds
        if format is not None:
            return cls(datetime.datetime.strptime(datetime_string, format))
        try:
            return cls(datetime.datetime.strptime(datetime_string, "%Y-%m-%d %H:%M:%S.%f"))
        except ValueError:
            return cls(datetime.datetime.strptime(datetime_string, "%Y-%m-%d %H:%M:%S"))

    def time(self):
        """Return the time portion as a Time (null DateTime -> null Time)."""
        if self:
            return Time(self.hour, self.minute, self.second, self.microsecond)
        return Time()

    @classmethod
    def utcnow(cls):
        return cls(datetime.datetime.utcnow())

    @classmethod
    def today(cls):
        return cls(datetime.datetime.today())

DateTime.max = DateTime(datetime.datetime.max)
DateTime.min = DateTime(datetime.datetime.min)
# build the null-datetime singleton directly (object.__new__ bypasses
# DateTime.__new__, which would need _null_datetime to already exist)
DateTime._null_datetime = object.__new__(DateTime)
DateTime._null_datetime._datetime = None
NullDateTime = DateTime()
class Time:
    """
    adds null capable datetime.time constructs

    All real instances wrap a datetime.time in ``_time``; the null
    singleton (``Time._null_time``, created below the class) has
    ``_time`` set to None and is falsy.
    """

    __slots__ = ['_time']

    def __new__(cls, hour=None, minute=0, second=0, microsecond=0):
        """
        hour may be a datetime.time or a str(Time)
        """
        if hour is None or hour is Null:
            return cls._null_time
        nt = object.__new__(cls)
        if isinstance(hour, basestring):
            hour = Time.strptime(hour)
        if isinstance(hour, Time):
            nt._time = hour._time
        elif isinstance(hour, (datetime.time)):
            # microseconds are kept at millisecond resolution only
            microsecond = hour.microsecond // 1000 * 1000
            hour, minute, second = hour.hour, hour.minute, hour.second
            nt._time = datetime.time(hour, minute, second, microsecond)
        elif hour is not None:
            microsecond = microsecond // 1000 * 1000
            nt._time = datetime.time(hour, minute, second, microsecond)
        return nt

    def __add__(self, other):
        "Time + timedelta -> Time (wraps around midnight)"
        if self and isinstance(other, (datetime.timedelta)):
            t = self._time
            # arbitrary fixed date so timedelta arithmetic can carry
            t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
            t += other
            return Time(t.hour, t.minute, t.second, t.microsecond)
        else:
            return NotImplemented

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._time == other._time
        if isinstance(other, datetime.time):
            return self._time == other
        if isinstance(other, type(None)):
            return self._time is None
        return NotImplemented

    def __format__(self, spec):
        if self:
            return self._time.__format__(spec)
        return ''

    def __getattr__(self, name):
        # delegate unknown attributes (hour, minute, ...) to the wrapped time
        if name == '_time':
            raise AttributeError('_time missing!')
        elif self:
            return getattr(self._time, name)
        else:
            raise AttributeError('NullTime object has no attribute %s' % name)

    def __ge__(self, other):
        if self:
            if isinstance(other, (datetime.time)):
                return self._time >= other
            elif isinstance(other, (Time)):
                if other:
                    return self._time >= other._time
                return False
        else:
            if isinstance(other, (datetime.time)):
                return False
            elif isinstance(other, (Time)):
                if other:
                    return False
                return True
        return NotImplemented

    def __gt__(self, other):
        if self:
            if isinstance(other, (datetime.time)):
                return self._time > other
            # BUG FIX: previously tested isinstance(other, DateTime), so a
            # Time operand always fell through to NotImplemented
            elif isinstance(other, (Time)):
                if other:
                    return self._time > other._time
                return True
        else:
            if isinstance(other, (datetime.time)):
                return False
            elif isinstance(other, (Time)):
                if other:
                    return False
                return False
        return NotImplemented

    def __hash__(self):
        # BUG FIX: previously hashed self._datetime, an attribute Time never
        # stores (__slots__ only allows _time), which always raised
        return self._time.__hash__()

    def __le__(self, other):
        if self:
            if isinstance(other, (datetime.time)):
                return self._time <= other
            elif isinstance(other, (Time)):
                if other:
                    return self._time <= other._time
                return False
        else:
            if isinstance(other, (datetime.time)):
                return True
            elif isinstance(other, (Time)):
                if other:
                    return True
                return True
        return NotImplemented

    def __lt__(self, other):
        if self:
            if isinstance(other, (datetime.time)):
                return self._time < other
            elif isinstance(other, (Time)):
                if other:
                    return self._time < other._time
                return False
        else:
            if isinstance(other, (datetime.time)):
                return True
            elif isinstance(other, (Time)):
                if other:
                    return True
                return False
        return NotImplemented

    def __ne__(self, other):
        if self:
            if isinstance(other, (datetime.time)):
                return self._time != other
            elif isinstance(other, (Time)):
                if other:
                    return self._time != other._time
                return True
        else:
            if isinstance(other, (datetime.time)):
                return True
            elif isinstance(other, (Time)):
                if other:
                    return True
                return False
        return NotImplemented

    def __bool__(self):
        # null Time is falsy
        return self._time is not None

    __radd__ = __add__

    def __rsub__(self, other):
        "time - Time -> timedelta"
        if self and isinstance(other, (Time, datetime.time)):
            t = self._time
            t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
            other = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond)
            other -= t
            return other
        else:
            return NotImplemented

    def __repr__(self):
        if self:
            return "Time(%d, %d, %d, %d)" % (self.hour, self.minute, self.second, self.microsecond)
        else:
            return "Time()"

    def __str__(self):
        if self:
            return str(self._time)
        return ""

    def __sub__(self, other):
        "Time - time -> timedelta;  Time - timedelta -> Time"
        if self and isinstance(other, (Time, datetime.time)):
            t = self._time
            t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
            o = datetime.datetime(2012, 6, 27, other.hour, other.minute, other.second, other.microsecond)
            return t - o
        elif self and isinstance(other, (datetime.timedelta)):
            t = self._time
            t = datetime.datetime(2012, 6, 27, t.hour, t.minute, t.second, t.microsecond)
            t -= other
            return Time(t.hour, t.minute, t.second, t.microsecond)
        else:
            return NotImplemented

    @classmethod
    def fromfloat(cls, num):
        "2.5 == 2 hours, 30 minutes, 0 seconds, 0 microseconds"
        if num < 0:
            raise ValueError("positive value required (got %r)" % num)
        if num == 0:
            return Time(0)
        hours = int(num)
        if hours:
            num = num % hours
        minutes = int(num * 60)
        if minutes:
            num = num * 60 % minutes
        else:
            num = num * 60
        seconds = int(num * 60)
        if seconds:
            num = num * 60 % seconds
        else:
            num = num * 60
        microseconds = int(num * 1000)
        return Time(hours, minutes, seconds, microseconds)

    @staticmethod
    def now():
        "current local time"
        return DateTime.now().time()

    def replace(self, hour=None, minute=None, second=None, microsecond=None, delta_hour=0, delta_minute=0, delta_second=0):
        """
        return a new Time with the given fields replaced and/or the delta_*
        offsets applied; results wrap around midnight.  A null Time is
        returned unchanged.
        """
        if not self:
            return self.__class__._null_time
        old_hour, old_minute, old_second, old_micro = self.hour, self.minute, self.second, self.microsecond
        hour = (hour or old_hour) + delta_hour
        minute = (minute or old_minute) + delta_minute
        second = (second or old_second) + delta_second
        microsecond = microsecond or old_micro
        while not (0 <= hour < 24) or not (0 <= minute < 60) or not (0 <= second < 60):
            while second < 0:
                minute -= 1
                second = 60 + second
            while second > 59:
                minute += 1
                second = second - 60
            while minute < 0:
                hour -= 1
                minute = 60 + minute
            while minute > 59:
                hour += 1
                minute = minute - 60
            while hour < 1:
                hour = 24 + hour
            while hour > 23:
                hour = hour - 24
        return Time(hour, minute, second, microsecond)

    def strftime(self, format):
        "format the wrapped time; a null Time formats as ''"
        if self:
            return self._time.strftime(format)
        return ''

    @classmethod
    def strptime(cls, time_string, format=None):
        """
        parse time_string with `format`, or with 'HH:MM:SS[.ffffff]'
        when no format is given
        """
        # BUG FIX: datetime.time has no strptime method; parse with
        # datetime.datetime.strptime and keep only the time portion
        if format is not None:
            return cls(datetime.datetime.strptime(time_string, format).time())
        try:
            return cls(datetime.datetime.strptime(time_string, "%H:%M:%S.%f").time())
        except ValueError:
            return cls(datetime.datetime.strptime(time_string, "%H:%M:%S").time())

    def time(self):
        "return the wrapped datetime.time, or None for a null Time"
        if self:
            return self._time
        return None

    def tofloat(self):
        "returns Time as a float"
        hour = self.hour
        minute = self.minute * (1.0 / 60)
        second = self.second * (1.0 / 3600)
        microsecond = self.microsecond * (1.0 / 3600000)
        return hour + minute + second + microsecond
1504 1505 Time.max = Time(datetime.time.max) 1506 Time.min = Time(datetime.time.min) 1507 Time._null_time = object.__new__(Time) 1508 Time._null_time._time = None 1509 NullTime = Time()


class Period:
    "for matching various time ranges"

    def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None):
        "remember only the fields that were actually specified"
        given = dict(year=year, month=month, day=day, hour=hour,
                     minute=minute, second=second, microsecond=microsecond)
        self._mask = {name: val for name, val in given.items() if val is not None}

    def __contains__(self, other):
        """
        True if every masked field of `other` equals the masked value
        or is contained in it (e.g. a range)
        """
        if not self._mask:
            return True
        for attr, wanted in self._mask.items():
            actual = getattr(other, attr, None)
            try:
                if actual == wanted or actual in wanted:
                    continue
            except TypeError:
                # wanted is not a container -- no match on this field
                pass
            return False
        return True

    def __repr__(self):
        shown = ['%s=%s' % (attr, self._mask[attr])
                 for attr in ('year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond')
                 if attr in self._mask]
        return "Period(%s)" % ', '.join(shown)
1542


class Logical:
    """
    Logical field return type implementing three-valued logic.

    Accepts values of True, False, or None/Null.
    The boolean value of Unknown is False (use Quantum if you want an
    exception instead).

    All instances are one of the three singletons created after the class
    body (Truth, Falsth, Unknown); each carries `value` (True/False/None)
    and `string` ('T'/'F'/'?').
    """

    def __new__(cls, value=None):
        # None/Null/Other/Unknown -> unknown; strings map to their
        # conventional truth value; anything else goes through bool()
        if value is None or value is Null or value is Other or value is Unknown:
            return cls.unknown
        elif isinstance(value, basestring):
            if value.lower() in ('t', 'true', 'y', 'yes', 'on'):
                return cls.true
            elif value.lower() in ('f', 'false', 'n', 'no', 'off'):
                return cls.false
            elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''):
                return cls.unknown
            else:
                raise ValueError('unknown value for Logical: %s' % value)
        else:
            return (cls.false, cls.true)[bool(value)]

    # ---- arithmetic: Unknown is contagious; otherwise treat as int ----

    def __add__(x, y):
        if isinstance(y, type(None)) or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x) + i

    __radd__ = __iadd__ = __add__

    def __sub__(x, y):
        if isinstance(y, type(None)) or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x) - i

    __isub__ = __sub__

    def __rsub__(y, x):
        if isinstance(x, type(None)) or x is Unknown or y is Unknown:
            return Unknown
        try:
            i = int(x)
        except Exception:
            return NotImplemented
        return i - int(y)

    def __mul__(x, y):
        # 0 * anything is 0, even 0 * Unknown
        if x == 0 or y == 0:
            return 0
        elif isinstance(y, type(None)) or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x) * i

    __rmul__ = __imul__ = __mul__

    # NOTE(review): __div__/__rdiv__ are Python 2 protocol methods; under
    # Python 3 the / operator never calls them, and int has no __div__
    # method, so these paths appear to be dead code -- confirm before use.
    def __div__(x, y):
        if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x).__div__(i)

    __idiv__ = __div__

    def __rdiv__(y, x):
        if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown:
            return Unknown
        try:
            i = int(x)
        except Exception:
            return NotImplemented
        return i.__div__(int(y))

    def __truediv__(x, y):
        # division by zero or by/of Unknown yields Unknown, not an exception
        if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x).__truediv__(i)

    __itruediv__ = __truediv__

    def __rtruediv__(y, x):
        if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(x)
        except Exception:
            return NotImplemented
        return i.__truediv__(int(y))

    def __floordiv__(x, y):
        if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x).__floordiv__(i)

    __ifloordiv__ = __floordiv__

    def __rfloordiv__(y, x):
        if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(x)
        except Exception:
            return NotImplemented
        return i.__floordiv__(int(y))

    def __divmod__(x, y):
        if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return (Unknown, Unknown)
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return divmod(int(x), i)

    def __rdivmod__(y, x):
        if isinstance(x, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return (Unknown, Unknown)
        try:
            i = int(x)
        except Exception:
            return NotImplemented
        return divmod(i, int(y))

    def __mod__(x, y):
        if isinstance(y, type(None)) or y == 0 or y is Unknown or x is Unknown:
            return Unknown
        try:
            i = int(y)
        except Exception:
            return NotImplemented
        return int(x) % i

    __imod__ = __mod__

    def __rmod__(y, x):
        if isinstance(x, type(None)) or y == 0 or x is Unknown or y is Unknown:
            return Unknown
        try:
            i = int(x)
        except Exception:
            return NotImplemented
        return i % int(y)

    def __pow__(x, y):
        # anything ** 0 is 1, even Unknown ** 0
        if not isinstance(y, (x.__class__, bool, type(None), baseinteger)):
            return NotImplemented
        if isinstance(y, type(None)) or y is Unknown:
            return Unknown
        i = int(y)
        if i == 0:
            return 1
        if x is Unknown:
            return Unknown
        return int(x) ** i

    __ipow__ = __pow__

    def __rpow__(y, x):
        if not isinstance(x, (y.__class__, bool, type(None), baseinteger)):
            return NotImplemented
        if y is Unknown:
            return Unknown
        i = int(y)
        if i == 0:
            return 1
        if x is Unknown or isinstance(x, type(None)):
            return Unknown
        return int(x) ** i

    # ---- bit shifts: Unknown is contagious ----

    def __lshift__(x, y):
        if isinstance(y, type(None)) or x is Unknown or y is Unknown:
            return Unknown
        return int(x.value) << int(y)

    __ilshift__ = __lshift__

    def __rlshift__(y, x):
        if isinstance(x, type(None)) or x is Unknown or y is Unknown:
            return Unknown
        return int(x) << int(y)

    def __rshift__(x, y):
        if isinstance(y, type(None)) or x is Unknown or y is Unknown:
            return Unknown
        return int(x.value) >> int(y)

    __irshift__ = __rshift__

    def __rrshift__(y, x):
        if isinstance(x, type(None)) or x is Unknown or y is Unknown:
            return Unknown
        return int(x) >> int(y)

    # ---- unary operators ----

    def __neg__(x):
        "NEG (negation)"
        if x in (Truth, Falsth):
            return -x.value
        return Unknown

    def __pos__(x):
        "POS (unary plus)"
        if x in (Truth, Falsth):
            return +x.value
        return Unknown

    def __abs__(x):
        if x in (Truth, Falsth):
            return abs(x.value)
        return Unknown

    def __invert__(x):
        if x in (Truth, Falsth):
            return ~x.value
        return Unknown

    # ---- numeric conversions: Unknown has no numeric value ----

    def __complex__(x):
        if x.value is None:
            raise ValueError("unable to return complex() of %r" % x)
        return complex(x.value)

    def __int__(x):
        if x.value is None:
            raise ValueError("unable to return int() of %r" % x)
        return int(x.value)

    def __float__(x):
        if x.value is None:
            raise ValueError("unable to return float() of %r" % x)
        return float(x.value)

    # ---- logical connectives (Kleene-style three-valued semantics) ----

    def __and__(x, y):
        """
        AND (conjunction) x & y:
        True iff both x, y are True
        False iff at least one of x, y is False
        Unknown otherwise
        """
        # with a plain (non-bool) integer operand, fall back to bitwise &
        if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)):
            if x == 0 or y == 0:
                return 0
            elif x is Unknown or y is Unknown:
                return Unknown
            return int(x) & int(y)
        elif x in (False, Falsth) or y in (False, Falsth):
            return Falsth
        elif x in (True, Truth) and y in (True, Truth):
            return Truth
        elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
            return Unknown
        return NotImplemented

    __rand__ = __and__

    def __or__(x, y):
        "OR (disjunction): x | y => True iff at least one of x, y is True"
        if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)):
            if x is Unknown or y is Unknown:
                return Unknown
            return int(x) | int(y)
        elif x in (True, Truth) or y in (True, Truth):
            return Truth
        elif x in (False, Falsth) and y in (False, Falsth):
            return Falsth
        elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
            return Unknown
        return NotImplemented

    __ror__ = __or__

    def __xor__(x, y):
        "XOR (parity) x ^ y: True iff only one of x,y is True"
        if (isinstance(x, baseinteger) and not isinstance(x, bool)) or (isinstance(y, baseinteger) and not isinstance(y, bool)):
            if x is Unknown or y is Unknown:
                return Unknown
            return int(x) ^ int(y)
        elif x in (True, Truth, False, Falsth) and y in (True, Truth, False, Falsth):
            return {
                (True, True)  : Falsth,
                (True, False) : Truth,
                (False, True) : Truth,
                (False, False): Falsth,
                }[(x, y)]
        elif isinstance(x, type(None)) or isinstance(y, type(None)) or y is Unknown or x is Unknown:
            return Unknown
        return NotImplemented

    __rxor__ = __xor__

    def __bool__(x):
        "boolean value of Unknown is assumed False"
        return x.value is True

    # ---- comparisons: Unknown compares as equal-to-null, never greater ----

    def __eq__(x, y):
        if isinstance(y, x.__class__):
            return x.value == y.value
        elif isinstance(y, (bool, NoneType, baseinteger)):
            return x.value == y
        return NotImplemented

    def __ge__(x, y):
        if isinstance(y, type(None)) or x is Unknown or y is Unknown:
            return x.value == None
        elif isinstance(y, x.__class__):
            return x.value >= y.value
        elif isinstance(y, (bool, baseinteger)):
            return x.value >= y
        return NotImplemented

    def __gt__(x, y):
        if isinstance(y, type(None)) or x is Unknown or y is Unknown:
            return False
        elif isinstance(y, x.__class__):
            return x.value > y.value
        elif isinstance(y, (bool, baseinteger)):
            return x.value > y
        return NotImplemented

    def __le__(x, y):
        if isinstance(y, type(None)) or x is Unknown or y is Unknown:
            return x.value == None
        elif isinstance(y, x.__class__):
            return x.value <= y.value
        elif isinstance(y, (bool, baseinteger)):
            return x.value <= y
        return NotImplemented

    def __lt__(x, y):
        if isinstance(y, type(None)) or x is Unknown or y is Unknown:
            return False
        elif isinstance(y, x.__class__):
            return x.value < y.value
        elif isinstance(y, (bool, baseinteger)):
            return x.value < y
        return NotImplemented

    def __ne__(x, y):
        if isinstance(y, x.__class__):
            return x.value != y.value
        elif isinstance(y, (bool, type(None), baseinteger)):
            return x.value != y
        return NotImplemented

    def __hash__(x):
        return hash(x.value)

    def __index__(x):
        if x.value is None:
            raise ValueError("unable to return int() of %r" % x)
        return int(x.value)

    def __repr__(x):
        return "Logical(%r)" % x.string

    def __str__(x):
        return x.string
1921 1922 Logical.true = object.__new__(Logical) 1923 Logical.true.value = True 1924 Logical.true.string = 'T' 1925 Logical.false = object.__new__(Logical) 1926 Logical.false.value = False 1927 Logical.false.string = 'F' 1928 Logical.unknown = object.__new__(Logical) 1929 Logical.unknown.value = None 1930 Logical.unknown.string = '?' 1931 Truth = Logical(True) 1932 Falsth = Logical(False) 1933 Unknown = Logical()


class Quantum(object):
    """
    Logical field return type that implements boolean algebra

    Accepts values of True/On, False/Off, or None/Null/Unknown/Other.

    All instances are one of the three singletons created after the class
    body (On, Off, Other); each carries `value` (True/False/None) and
    `string` ('Y'/'N'/'?').  The single-letter method names (A, C, D, E,
    J, K, N) follow the classic notation for boolean-algebra connectives;
    connectives involving an unknown operand yield Other.
    """

    def __new__(cls, value=None):
        # None/Null/Other/Unknown -> unknown; strings map to their
        # conventional truth value; anything else goes through bool()
        if value is None or value is Null or value is Other or value is Unknown:
            return cls.unknown
        elif isinstance(value, basestring):
            if value.lower() in ('t', 'true', 'y', 'yes', 'on'):
                return cls.true
            elif value.lower() in ('f', 'false', 'n', 'no', 'off'):
                return cls.false
            elif value.lower() in ('?', 'unknown', 'null', 'none', ' ', ''):
                return cls.unknown
            else:
                raise ValueError('unknown value for Quantum: %s' % value)
        else:
            return (cls.false, cls.true)[bool(value)]

    def A(x, y):
        "OR (disjunction): x | y => True iff at least one of x, y is True"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if x.value is True or y is not Other and y == True:
            return x.true
        elif x.value is False and y is not Other and y == False:
            return x.false
        return Other

    def _C_material(x, y):
        "IMP (material implication) x >> y => False iff x == True and y == False"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if (x.value is False
            or (x.value is True and y is not Other and y == True)):
            return x.true
        elif x.value is True and y is not Other and y == False:
            # BUG FIX: previously returned the bare builtin False instead of
            # the x.false singleton that every other connective returns
            return x.false
        return Other

    def _C_material_reversed(y, x):
        "IMP (material implication) x >> y => False iff x = True and y = False"
        if not isinstance(x, (y.__class__, bool, NullType, type(None))):
            return NotImplemented
        if (x is not Other and x == False
            or (x is not Other and x == True and y.value is True)):
            return y.true
        elif x is not Other and x == True and y.value is False:
            return y.false
        return Other

    def _C_relevant(x, y):
        "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if x is False"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if x.value is True and y is not Other and y == True:
            return x.true
        if x.value is True and y is not Other and y == False:
            return x.false
        return Other

    def _C_relevant_reversed(y, x):
        "IMP (relevant implication) x >> y => True iff both x, y are True, False iff x == True and y == False, Other if y is False"
        if not isinstance(x, (y.__class__, bool, NullType, type(None))):
            return NotImplemented
        if x is not Other and x == True and y.value is True:
            return y.true
        if x is not Other and x == True and y.value is False:
            return y.false
        return Other

    def D(x, y):
        "NAND (negative AND) x.D(y): False iff x and y are both True"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if x.value is False or y is not Other and y == False:
            return x.true
        elif x.value is True and y is not Other and y == True:
            return x.false
        return Other

    def E(x, y):
        "EQV (equivalence) x.E(y): True iff x and y are the same"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        elif (
            (x.value is True and y is not Other and y == True)
            or
            (x.value is False and y is not Other and y == False)
            ):
            return x.true
        elif (
            (x.value is True and y is not Other and y == False)
            or
            (x.value is False and y is not Other and y == True)
            ):
            return x.false
        return Other

    def J(x, y):
        "XOR (parity) x ^ y: True iff only one of x,y is True"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if (
            (x.value is True and y is not Other and y == False)
            or
            (x.value is False and y is not Other and y == True)
            ):
            return x.true
        if (
            (x.value is False and y is not Other and y == False)
            or
            (x.value is True and y is not Other and y == True)
            ):
            return x.false
        return Other

    def K(x, y):
        "AND (conjunction) x & y: True iff both x, y are True"
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if x.value is True and y is not Other and y == True:
            return x.true
        elif x.value is False or y is not Other and y == False:
            return x.false
        return Other

    def N(x):
        "NEG (negation) -x: True iff x = False"
        if x is x.true:
            return x.false
        elif x is x.false:
            return x.true
        return Other

    @classmethod
    def set_implication(cls, method):
        "sets IMP to material or relevant"
        if not isinstance(method, basestring) or method.lower() not in ('material', 'relevant'):
            raise ValueError("method should be 'material' (for strict boolean) or 'relevant', not %r'" % method)
        # rebind C and the >> operator pair to the chosen implication flavor
        if method.lower() == 'material':
            cls.C = cls._C_material
            cls.__rshift__ = cls._C_material
            cls.__rrshift__ = cls._C_material_reversed
        elif method.lower() == 'relevant':
            cls.C = cls._C_relevant
            cls.__rshift__ = cls._C_relevant
            cls.__rrshift__ = cls._C_relevant_reversed

    def __eq__(x, y):
        # NOTE: equality returns a Quantum (boolean algebra), not a bool
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if (
            (x.value is True and y is not Other and y == True)
            or
            (x.value is False and y is not Other and y == False)
            ):
            return x.true
        elif (
            (x.value is True and y is not Other and y == False)
            or
            (x.value is False and y is not Other and y == True)
            ):
            return x.false
        return Other

    def __hash__(x):
        return hash(x.value)

    def __ne__(x, y):
        # NOTE: inequality also returns a Quantum, not a bool
        if not isinstance(y, (x.__class__, bool, NullType, type(None))):
            return NotImplemented
        if (
            (x.value is True and y is not Other and y == False)
            or
            (x.value is False and y is not Other and y == True)
            ):
            return x.true
        elif (
            (x.value is True and y is not Other and y == True)
            or
            (x.value is False and y is not Other and y == False)
            ):
            return x.false
        return Other

    def __bool__(x):
        # unlike Logical, an unknown Quantum refuses to collapse to a bool
        if x is Other:
            raise TypeError('True/False value of %r is unknown' % x)
        return x.value is True

    def __repr__(x):
        return "Quantum(%r)" % x.string

    def __str__(x):
        return x.string

    # operator aliases; __rshift__/__rrshift__ are filled in by
    # set_implication() after the singletons exist
    __add__ = A
    __and__ = K
    __mul__ = K
    __neg__ = N
    __or__ = A
    __radd__ = A
    __rand__ = K
    __rshift__ = None
    __rmul__ = K
    __ror__ = A
    __rrshift__ = None
    __rxor__ = J
    __xor__ = J
2149 2150 Quantum.true = object.__new__(Quantum) 2151 Quantum.true.value = True 2152 Quantum.true.string = 'Y' 2153 Quantum.false = object.__new__(Quantum) 2154 Quantum.false.value = False 2155 Quantum.false.string = 'N' 2156 Quantum.unknown = object.__new__(Quantum) 2157 Quantum.unknown.value = None 2158 Quantum.unknown.string = '?' 2159 Quantum.set_implication('material') 2160 On = Quantum(True) 2161 Off = Quantum(False) 2162 Other = Quantum() 2163 2164 2165 # add xmlrpc support 2166 from xmlrpc.client import Marshaller 2167 Marshaller.dispatch[Char] = Marshaller.dump_unicode 2168 Marshaller.dispatch[Logical] = Marshaller.dump_bool 2169 Marshaller.dispatch[DateTime] = Marshaller.dump_datetime 2170 del Marshaller

# Internal classes

class _Navigation(object):
    """
    Navigation base class that provides VPFish movement methods.

    Subclasses must support len() and integer indexing.  The record
    pointer `_index` uses two out-of-range sentinels: -1 means before the
    first record (bof) and len(self) means past the last record (eof).
    """

    _index = -1

    def _nav_check(self):
        """
        implemented by subclass; must return True if underlying structure meets need
        """
        raise NotImplementedError()

    def _get_index(self, direction, n=1, start=None):
        """
        returns index of next available record towards direction

        moving past either end returns the corresponding sentinel
        (-1 for reverse, len(self) for forward) instead of the raw index
        """
        if start is not None:
            index = start
        else:
            index = self._index
        if direction == 'reverse':
            move = -1 * n
            limit = 0
            index += move
            if index < limit:
                return -1
            else:
                return index
        elif direction == 'forward':
            move = +1 * n
            limit = len(self) - 1
            index += move
            if index > limit:
                return len(self)
            else:
                return index
        else:
            raise ValueError("direction should be 'forward' or 'reverse', not %r" % direction)

    @property
    def bof(self):
        """
        returns True if no more usable records towards the beginning of the table
        """
        self._nav_check()
        index = self._get_index('reverse')
        return index == -1

    def bottom(self):
        """
        sets record index to bottom of table (end of table)
        """
        self._nav_check()
        self._index = len(self)
        return self._index

    @property
    def current_record(self):
        """
        returns current record (deleted or not)
        """
        self._nav_check()
        index = self._index
        # at either sentinel there is no record, only a placeholder
        if index < 0:
            return RecordVaporWare('bof', self)
        elif index >= len(self):
            return RecordVaporWare('eof', self)
        return self[index]

    @property
    def current(self):
        """
        returns current index
        """
        self._nav_check()
        return self._index

    @property
    def eof(self):
        """
        returns True if no more usable records towards the end of the table
        """
        self._nav_check()
        index = self._get_index('forward')
        return index == len(self)

    @property
    def first_record(self):
        """
        returns first available record (does not move index)
        """
        self._nav_check()
        index = self._get_index('forward', start=-1)
        if -1 < index < len(self):
            return self[index]
        else:
            return RecordVaporWare('bof', self)

    def goto(self, where):
        """
        changes the record pointer to the first matching (deleted) record
        where should be either an integer, or 'top' or 'bottom'.
        top    -> before first record
        bottom -> after last record
        """
        self._nav_check()
        max = len(self)     # NOTE: shadows the builtin within this method
        if isinstance(where, baseinteger):
            if not -max <= where < max:
                raise IndexError("Record %d does not exist" % where)
            if where < 0:
                where += max
            self._index = where
            return self._index
        # non-integer: dispatch to the movement method of the same name
        move = getattr(self, where, None)
        if move is None:
            raise DbfError("unable to go to %r" % where)
        return move()

    @property
    def last_record(self):
        """
        returns last available record (does not move index)
        """
        self._nav_check()
        index = self._get_index('reverse', start=len(self))
        if -1 < index < len(self):
            return self[index]
        else:
            return RecordVaporWare('bof', self)

    @property
    def next_record(self):
        """
        returns next available record (does not move index)
        """
        self._nav_check()
        index = self._get_index('forward')
        if -1 < index < len(self):
            return self[index]
        else:
            return RecordVaporWare('eof', self)

    @property
    def prev_record(self):
        """
        returns previous available record (does not move index)
        """
        self._nav_check()
        index = self._get_index('reverse')
        if -1 < index < len(self):
            return self[index]
        else:
            return RecordVaporWare('bof', self)

    def skip(self, n=1):
        """
        move index to the next nth available record

        raises Bof/Eof if the move lands on a sentinel (the index is
        still updated first)
        """
        self._nav_check()
        if n < 0:
            n *= -1
            direction = 'reverse'
        else:
            direction = 'forward'
        self._index = index = self._get_index(direction, n)
        if index < 0:
            raise Bof()
        elif index >= len(self):
            raise Eof()
        else:
            return index

    def top(self):
        """
        sets record index to top of table (beginning of table)
        """
        self._nav_check()
        self._index = -1
        return self._index
2355
class Record(object):
    """
    Provides routines to extract and save data within the fields of a
    dbf record.
    """

    # _recnum: record number in the table (-1 for detached records)
    # _meta: the table layout/metadata object
    # _data: array('B') image of the raw record bytes
    # _old_data: snapshot taken by _start_flux, for rollback
    # _dirty: True when _data differs from what is on disk
    # _memos: pending memo-field values, flushed by _write
    # _write_to_disk: False while the record is "in flux" (inside `with`)
    __slots__ = ('_recnum', '_meta', '_data', '_old_data', '_dirty',
                 '_memos', '_write_to_disk', '__weakref__')

    def __new__(cls, recnum, layout, kamikaze=b'', _fromdisk=False):
        """
        record = ascii array of entire record;
        layout=record specification;
        memo = memo object for table
        """
        record = object.__new__(cls)
        record._dirty = False
        record._recnum = recnum
        record._meta = layout
        record._memos = {}
        record._write_to_disk = True
        record._old_data = None
        header = layout.header
        # start from the table's pre-computed blank-record image
        record._data = layout.blankrecord[:]
        if kamikaze and len(record._data) != len(kamikaze):
            raise BadDataError("record data is not the correct length (should be %r, not %r)" %
                    (len(record._data), len(kamikaze)), data=kamikaze[:])
        if recnum == -1:                    # not a disk-backed record
            return record
        elif type(kamikaze) == array:
            record._data = kamikaze[:]
        elif type(kamikaze) == bytes:
            if kamikaze:
                record._data = array('B', kamikaze)
        else:
            raise BadDataError("%r recieved for record data" % kamikaze)
        # byte 0 is the deleted flag: SPACE = live, ASTERISK = deleted;
        # a NUL is tolerated and normalized to SPACE
        if record._data[0] == NULL:
            record._data[0] = SPACE
        if record._data[0] not in (SPACE, ASTERISK):
            raise DbfError("record data not correct -- first character should be a ' ' or a '*'.")
        if not _fromdisk and layout.location == ON_DISK:
            record._update_disk()
        return record

    def __contains__(self, value):
        # membership is by field *value*, across user-visible fields only
        for field in self._meta.user_fields:
            if self[field] == value:
                return True
        return False

    def __enter__(self):
        # `with record:` opens a flux session; nested sessions are refused
        if not self._write_to_disk:
            raise DbfError("`with record` is not reentrant")
        self._start_flux()
        return self

    def __eq__(self, other):
        # compares field values against another record/template, a dict
        # (by field name), or a tuple (by position)
        if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
            return NotImplemented
        if isinstance(other, (Record, RecordTemplate)):
            if field_names(self) != field_names(other):
                return False
            for field in self._meta.user_fields:
                s_value, o_value = self[field], other[field]
                if s_value is not o_value and s_value != o_value:
                    return False
        elif isinstance(other, dict):
            if sorted(field_names(self)) != sorted(other.keys()):
                return False
            for field in self._meta.user_fields:
                s_value, o_value = self[field], other[field]
                if s_value is not o_value and s_value != o_value:
                    return False
        else: # tuple
            if len(self) != len(other):
                return False
            for s_value, o_value in zip(self, other):
                if s_value is not o_value and s_value != o_value:
                    return False
        return True

    def __exit__(self, *args):
        # commit on clean exit, roll back if an exception escaped the body
        if args == (None, None, None):
            self._commit_flux()
        else:
            self._rollback_flux()

    def __iter__(self):
        # yields user-field values in field order
        return (self[field] for field in self._meta.user_fields)

    def __getattr__(self, name):
        if name[0:2] == '__' and name[-2:] == '__':
            raise AttributeError('Method %s is not implemented.' % name)
        if not name in self._meta.fields:
            raise FieldMissingError(name)
        if name in self._memos:
            return self._memos[name]
        try:
            index = self._meta.fields.index(name)
            value = self._retrieve_field_value(index, name)
            return value
        except DbfError:
            error = sys.exc_info()[1]
            # NOTE(review): `fielddef` is never assigned in this scope, so
            # this line raises NameError instead of re-raising the DbfError
            # with context; it likely should read self._meta[name][TYPE]
            error.message = "field --%s-- is %s -> %s" % (name, self._meta.fieldtypes[fielddef['type']]['Type'], error.message)
            raise

    def __getitem__(self, item):
        # supports integer offset, name, and slice access (slices may use
        # field names as endpoints, which are inclusive of the stop field)
        if isinstance(item, baseinteger):
            fields = self._meta.user_fields
            field_count = len(fields)
            if not -field_count <= item < field_count:
                raise NotFoundError("Field offset %d is not in record" % item)
            field = fields[item]
            if field in self._memos:
                return self._memos[field]
            return self[field]
        elif isinstance(item, slice):
            sequence = []
            if isinstance(item.start, basestring) or isinstance(item.stop, basestring):
                field_names = dbf.field_names(self)
                start, stop, step = item.start, item.stop, item.step
                if start not in field_names or stop not in field_names:
                    raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
                if step is not None and not isinstance(step, baseinteger):
                    raise DbfError("step value must be an int, not %r" % type(step))
                start = field_names.index(start)
                stop = field_names.index(stop) + 1   # make the stop field inclusive
                item = slice(start, stop, step)
            for index in self._meta.fields[item]:
                sequence.append(self[index])
            return sequence
        elif isinstance(item, basestring):
            return self.__getattr__(item)
        else:
            raise TypeError("%r is not a field name" % item)

    def __len__(self):
        # number of user-visible fields, not the byte length of the record
        return self._meta.user_field_count

    def __ne__(self, other):
        if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
            return NotImplemented
        return not self == other

    def __setattr__(self, name, value):
        # slot attributes bypass all checks; field writes are only legal
        # inside a flux session on a read/write table
        if name in self.__slots__:
            object.__setattr__(self, name, value)
            return
        if self._meta.status != READ_WRITE:
            raise DbfError("%s not in read/write mode" % self._meta.filename)
        elif self._write_to_disk:
            raise DbfError("unable to modify fields individually except in `with` or `Process()`")
        elif not name in self._meta.fields:
            raise FieldMissingError(name)
        if name in self._meta.memofields:
            # memo values are held aside and flushed by _write
            self._memos[name] = value
            self._dirty = True
            return
        index = self._meta.fields.index(name)
        try:
            self._update_field_value(index, name, value)
        except DbfError:
            # re-raise the same exception class with field context added
            error = sys.exc_info()[1]
            fielddef = self._meta[name]
            message = "%s (%s) = %r --> %s" % (name, self._meta.fieldtypes[fielddef[TYPE]]['Type'], value, error.args)
            data = name
            err_cls = error.__class__
            raise err_cls(message, data)

    def __setitem__(self, name, value):
        if self._meta.status != READ_WRITE:
            raise DbfError("%s not in read/write mode" % self._meta.filename)
        if self._write_to_disk:
            raise DbfError("unable to modify fields individually except in `with` or `Process()`")
        if isinstance(name, basestring):
            self.__setattr__(name, value)
        elif isinstance(name, baseinteger):
            self.__setattr__(self._meta.fields[name], value)
        elif isinstance(name, slice):
            sequence = []
            field_names = dbf.field_names(self)
            if isinstance(name.start, basestring) or isinstance(name.stop, basestring):
                start, stop, step = name.start, name.stop, name.step
                if start not in field_names or stop not in field_names:
                    raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
                if step is not None and not isinstance(step, baseinteger):
                    raise DbfError("step value must be an int, not %r" % type(step))
                start = field_names.index(start)
                stop = field_names.index(stop) + 1   # stop field is inclusive
                name = slice(start, stop, step)
            for field in self._meta.fields[name]:
                sequence.append(field)
            if len(sequence) != len(value):
                raise DbfError("length of slices not equal")
            for field, val in zip(sequence, value):
                self[field] = val
        else:
            raise TypeError("%s is not a field name" % name)

    def __str__(self):
        result = []
        for seq, field in enumerate(field_names(self)):
            result.append("%3d - %-10s: %r" % (seq, field, self[field]))
        return '\n'.join(result)

    def __repr__(self):
        # raw record bytes; latin1 maps every byte value so the decode
        # cannot fail (it is not a round-trippable repr)
        return self._data.tobytes().decode('latin1')

    def _commit_flux(self):
        """
        stores field updates to disk; if any errors restores previous contents and propogates exception
        """
        if self._write_to_disk:
            raise DbfError("record not in flux")
        try:
            self._write()
        except Exception:
            exc = sys.exc_info()[1]
            # restore both the in-memory image and the on-disk copy
            self._data[:] = self._old_data
            self._update_disk(data=self._old_data)
            raise DbfError("unable to write updates to disk, original data restored: %r" % (exc,)) from None
        self._memos.clear()
        self._old_data = None
        self._write_to_disk = True
        self._reindex_record()

    @classmethod
    def _create_blank_data(cls, layout):
        """
        creates a blank record data chunk
        """
        # builds layout.blankrecord (the byte image of an empty record)
        # and layout.record_sig (a signature identifying the field layout)
        record = object.__new__(cls)
        record._dirty = False
        record._recnum = -1
        record._meta = layout
        record._data = array('B', b' ' * layout.header.record_length)
        layout.memofields = []
        signature = [layout.table().codepage.name]
        for index, name in enumerate(layout.fields):
            if name == '_nullflags':
                # the null-flag bitmap starts zeroed (no field is Null)
                record._data[layout['_nullflags'][START]:layout['_nullflags'][END]] = array('B', [0] * layout['_nullflags'][LENGTH])
        for index, name in enumerate(layout.fields):
            signature.append(name)
            if name != '_nullflags':
                type = FieldType(layout[name][TYPE])
                start = layout[name][START]
                size = layout[name][LENGTH]
                end = layout[name][END]
                blank = layout.fieldtypes[type]['Blank']
                record._data[start:end] = array('B', blank(size))
                if layout[name][TYPE] in layout.memo_types:
                    layout.memofields.append(name)
                decimals = layout[name][DECIMALS]
                signature[-1] = '_'.join([str(x) for x in (signature[-1], type.symbol, size, decimals)])
        layout.blankrecord = record._data[:]
        data_types = []
        for fieldtype, defs in sorted(layout.fieldtypes.items()):
            if fieldtype != _NULLFLAG:    # ignore the nullflags field
                data_types.append("%s_%s_%s" % (fieldtype.symbol, defs['Empty'], defs['Class']))
        layout.record_sig = ('___'.join(signature), '___'.join(data_types))

    def _reindex_record(self):
        """
        rerun all indices with this record
        """
        if self._meta.status == CLOSED:
            raise DbfError("%s is closed; cannot alter indices" % self._meta.filename)
        elif not self._write_to_disk:
            raise DbfError("unable to reindex record until it is written to disk")
        for dbfindex in self._meta.table()._indexen:
            dbfindex(self)

    def _retrieve_field_value(self, index, name):
        """
        calls appropriate routine to convert value stored in field from array
        """
        fielddef = self._meta[name]
        flags = fielddef[FLAGS]
        nullable = flags & NULLABLE and '_nullflags' in self._meta
        binary = flags & BINARY
        if nullable:
            # null flags are a bitmap: one bit per field, eight per byte
            byte, bit = divmod(index, 8)
            null_def = self._meta['_nullflags']
            null_data = self._data[null_def[START]:null_def[END]]
            try:
                if null_data[byte] >> bit & 1:
                    return Null
            except IndexError:
                # NOTE(review): leftover debugging output -- dumps state to
                # stdout before re-raising; consider removing or logging
                print(null_data)
                print(index)
                print(byte, bit)
                print(len(self._data), self._data)
                print(null_def)
                print(null_data)
                raise

        record_data = self._data[fielddef[START]:fielddef[END]]
        field_type = fielddef[TYPE]
        retrieve = self._meta.fieldtypes[field_type]['Retrieve']
        datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder)
        return datum

    def _rollback_flux(self):
        """
        discards all changes since ._start_flux()
        """
        if self._write_to_disk:
            raise DbfError("record not in flux")
        self._data = self._old_data
        self._old_data = None
        self._memos.clear()
        self._write_to_disk = True
        # push the restored image back to disk
        self._write()

    def _start_flux(self):
        """
        Allows record.field_name = ... and record[...] = ...; must use ._commit_flux() to commit changes
        """
        if self._meta.status == CLOSED:
            raise DbfError("%s is closed; cannot modify record" % self._meta.filename)
        elif self._recnum < 0:
            raise DbfError("record has been packed; unable to update")
        elif not self._write_to_disk:
            raise DbfError("record already in a state of flux")
        self._old_data = self._data[:]
        self._write_to_disk = False

    def _update_field_value(self, index, name, value):
        """
        calls appropriate routine to convert value to bytes, and save it in record
        """
        fielddef = self._meta[name]
        field_type = fielddef[TYPE]
        flags = fielddef[FLAGS]
        binary = flags & BINARY
        nullable = flags & NULLABLE and '_nullflags' in self._meta
        update = self._meta.fieldtypes[field_type]['Update']
        if nullable:
            # set/clear this field's bit in the null-flag bitmap
            byte, bit = divmod(index, 8)
            null_def = self._meta['_nullflags']
            null_data = self._data[null_def[START]:null_def[END]] #.tostring()
            # null_data = [ord(c) for c in null_data]
            if value is Null:
                null_data[byte] |= 1 << bit
                value = None
            else:
                null_data[byte] &= 0xff ^ 1 << bit
            # null_data = array('B', [chr(n) for n in null_data])
            self._data[null_def[START]:null_def[END]] = null_data
        if value is not Null:
            bytes = array('B', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder))
            size = fielddef[LENGTH]
            if len(bytes) > size:
                raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size))
            # right-pad the encoded value with spaces to the field width
            blank = array('B', b' ' * size)
            start = fielddef[START]
            end = start + size
            blank[:len(bytes)] = bytes[:]
            self._data[start:end] = blank[:]
        self._dirty = True

    def _update_disk(self, location='', data=None):
        # writes the record image at its slot in the table file and
        # refreshes any active indices
        layout = self._meta
        if self._recnum < 0:
            raise DbfError("cannot update a packed record")
        if layout.location == ON_DISK:
            header = layout.header
            if location == '':
                location = self._recnum * header.record_length + header.start
            if data is None:
                data = self._data
            layout.dfd.seek(location)
            layout.dfd.write(data)
            self._dirty = False
        table = layout.table()
        if table is not None:  # is None when table is being destroyed
            for index in table._indexen:
                index(self)

    def _write(self):
        # flush pending memo values into the record image, then to disk
        for field, value in self._memos.items():
            index = self._meta.fields.index(field)
            self._update_field_value(index, field, value)
        self._update_disk()
class RecordTemplate(object):
    """
    Provides routines to mimic a dbf record.
    """

    # same layout as Record, minus _recnum/_dirty -- templates are never
    # persisted, so "commit" just forgets the rollback snapshot
    __slots__ = ('_meta', '_data', '_old_data', '_memos', '_write_to_disk', '__weakref__')

    def _commit_flux(self):
        """
        Flushes field updates to disk
        If any errors restores previous contents and raises `DbfError`
        """
        if self._write_to_disk:
            raise DbfError("record not in flux")
        self._memos.clear()
        self._old_data = None
        self._write_to_disk = True

    def _retrieve_field_value(self, index, name):
        """
        Calls appropriate routine to convert value stored in field from
        array
        """
        fielddef = self._meta[name]
        flags = fielddef[FLAGS]
        nullable = flags & NULLABLE and '_nullflags' in self._meta
        binary = flags & BINARY
        if nullable:
            # null flags are a bitmap: one bit per field, eight per byte
            byte, bit = divmod(index, 8)
            null_def = self._meta['_nullflags']
            null_data = self._data[null_def[START]:null_def[END]]
            if null_data[byte] >> bit & 1:
                return Null
        record_data = self._data[fielddef[START]:fielddef[END]]
        field_type = fielddef[TYPE]
        retrieve = self._meta.fieldtypes[field_type]['Retrieve']
        datum = retrieve(record_data, fielddef, self._meta.memo, self._meta.decoder)
        return datum

    def _rollback_flux(self):
        """
        discards all changes since ._start_flux()
        """
        if self._write_to_disk:
            raise DbfError("template not in flux")
        self._data = self._old_data
        self._old_data = None
        self._memos.clear()
        self._write_to_disk = True

    def _start_flux(self):
        """
        Allows record.field_name = ... and record[...] = ...; must use ._commit_flux() to commit changes
        """
        if not self._write_to_disk:
            raise DbfError("template already in a state of flux")
        self._old_data = self._data[:]
        self._write_to_disk = False

    def _update_field_value(self, index, name, value):
        """
        calls appropriate routine to convert value to ascii bytes, and save it in record
        """
        fielddef = self._meta[name]
        field_type = fielddef[TYPE]
        flags = fielddef[FLAGS]
        binary = flags & BINARY
        nullable = flags & NULLABLE and '_nullflags' in self._meta
        update = self._meta.fieldtypes[field_type]['Update']
        if nullable:
            # set/clear this field's bit in the null-flag bitmap
            byte, bit = divmod(index, 8)
            null_def = self._meta['_nullflags']
            null_data = self._data[null_def[START]:null_def[END]] #.tostring()
            # null_data = [ord(c) for c in null_data]
            if value is Null:
                null_data[byte] |= 1 << bit
                value = None
            else:
                null_data[byte] &= 0xff ^ 1 << bit
            # null_data = array('B', [chr(n) for n in null_data])
            self._data[null_def[START]:null_def[END]] = null_data
        if value is not Null:
            bytes = array('B', update(value, fielddef, self._meta.memo, self._meta.input_decoder, self._meta.encoder))
            size = fielddef[LENGTH]
            if len(bytes) > size:
                raise DataOverflowError("tried to store %d bytes in %d byte field" % (len(bytes), size))
            # right-pad the encoded value with spaces to the field width
            blank = array('B', b' ' * size)
            start = fielddef[START]
            end = start + size
            blank[:len(bytes)] = bytes[:]
            self._data[start:end] = blank[:]

    def __new__(cls, layout, original_record=None, defaults=None):
        """
        record = ascii array of entire record; layout=record specification
        """
        # templates share an in-memory layout per record signature, created
        # on first use and cached in _Template_Records
        sig = layout.record_sig
        if sig not in _Template_Records:
            table = layout.table()
            _Template_Records[sig] = table.new(
                    ':%s:' % layout.filename,
                    default_data_types=table._meta._default_data_types,
                    field_data_types=table._meta._field_data_types, on_disk=False
                    )._meta
        layout = _Template_Records[sig]
        record = object.__new__(cls)
        record._write_to_disk = True
        record._meta = layout
        record._memos = {}
        for name in layout.memofields:
            field_type = layout[name][TYPE]
            record._memos[name] = layout.fieldtypes[field_type]['Empty']()
        header = layout.header
        if original_record is None:
            record._data = layout.blankrecord[:]
        else:
            # copy both the byte image and any memo values
            record._data = original_record._data[:]
            for name in layout.memofields:
                record._memos[name] = original_record[name]
        for field in field_names(defaults or {}):
            record[field] = defaults[field]
        record._old_data = record._data[:]
        return record

    def __contains__(self, key):
        # membership is by field *name* (unlike Record, which is by value)
        return key in self._meta.user_fields

    def __eq__(self, other):
        # compares field values against another record/template, a dict
        # (by field name), or a tuple (by position)
        if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
            return NotImplemented
        if isinstance(other, (Record, RecordTemplate)):
            if field_names(self) != field_names(other):
                return False
            for field in self._meta.user_fields:
                s_value, o_value = self[field], other[field]
                if s_value is not o_value and s_value != o_value:
                    return False
        elif isinstance(other, dict):
            if sorted(field_names(self)) != sorted(other.keys()):
                return False
            for field in self._meta.user_fields:
                s_value, o_value = self[field], other[field]
                if s_value is not o_value and s_value != o_value:
                    return False
        else: # tuple
            if len(self) != len(other):
                return False
            for s_value, o_value in zip(self, other):
                if s_value is not o_value and s_value != o_value:
                    return False
        return True

    def __iter__(self):
        # yields user-field values in field order
        return (self[field] for field in self._meta.user_fields)

    def __getattr__(self, name):
        if name[0:2] == '__' and name[-2:] == '__':
            raise AttributeError('Method %s is not implemented.' % name)
        if not name in self._meta.fields:
            raise FieldMissingError(name)
        if name in self._memos:
            return self._memos[name]
        try:
            index = self._meta.fields.index(name)
            value = self._retrieve_field_value(index, name)
            return value
        except DbfError:
            error = sys.exc_info()[1]
            # NOTE(review): `fielddef` is never assigned in this scope, so
            # this line raises NameError instead of re-raising the DbfError
            # with context; it likely should read self._meta[name][TYPE]
            error.message = "field --%s-- is %s -> %s" % (name, self._meta.fieldtypes[fielddef['type']]['Type'], error.message)
            raise

    def __getitem__(self, item):
        # supports integer offset, name, and slice access (slices may use
        # field names as endpoints, which are inclusive of the stop field)
        fields = self._meta.user_fields
        if isinstance(item, baseinteger):
            field_count = len(fields)
            if not -field_count <= item < field_count:
                raise NotFoundError("Field offset %d is not in record" % item)
            field = fields[item]
            if field in self._memos:
                return self._memos[field]
            return self[field]
        elif isinstance(item, slice):
            sequence = []
            if isinstance(item.start, basestring) or isinstance(item.stop, basestring):
                start, stop, step = item.start, item.stop, item.step
                if start not in fields or stop not in fields:
                    raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
                if step is not None and not isinstance(step, baseinteger):
                    raise DbfError("step value must be an int, not %r" % type(step))
                start = fields.index(start)
                stop = fields.index(stop) + 1   # make the stop field inclusive
                item = slice(start, stop, step)
            for index in self._meta.fields[item]:
                sequence.append(self[index])
            return sequence
        elif isinstance(item, basestring):
            return self.__getattr__(item)
        else:
            raise TypeError("%r is not a field name" % item)

    def __len__(self):
        # number of user-visible fields, not the byte length of the record
        return self._meta.user_field_count

    def __ne__(self, other):
        if not isinstance(other, (Record, RecordTemplate, dict, tuple)):
            return NotImplemented
        return not self == other

    def __setattr__(self, name, value):
        # templates are always writable: no flux/status checks here
        if name in self.__slots__:
            object.__setattr__(self, name, value)
            return
        if not name in self._meta.fields:
            raise FieldMissingError(name)
        if name in self._meta.memofields:
            self._memos[name] = value
            # NOTE(review): '_dirty' is neither in __slots__ nor a field, so
            # this re-enters __setattr__ and raises FieldMissingError('_dirty');
            # memo-field assignment on a template appears broken
            self._dirty = True
            return
        index = self._meta.fields.index(name)
        try:
            self._update_field_value(index, name, value)
        except DbfError:
            # re-raise the same exception class with field context added
            error = sys.exc_info()[1]
            fielddef = self._meta[name]
            message = "%s (%s) = %r --> %s" % (name, self._meta.fieldtypes[fielddef[TYPE]]['Type'], value, error.message)
            data = name
            err_cls = error.__class__
            raise err_cls(message, data) from None

    def __setitem__(self, name, value):
        if isinstance(name, basestring):
            self.__setattr__(name, value)
        elif isinstance(name, baseinteger):
            self.__setattr__(self._meta.fields[name], value)
        elif isinstance(name, slice):
            sequence = []
            field_names = dbf.field_names(self)
            if isinstance(name.start, basestring) or isinstance(name.stop, basestring):
                start, stop, step = name.start, name.stop, name.step
                if start not in field_names or stop not in field_names:
                    raise MissingFieldError("Either %r or %r (or both) are not valid field names" % (start, stop))
                if step is not None and not isinstance(step, baseinteger):
                    raise DbfError("step value must be an int, not %r" % type(step))
                start = field_names.index(start)
                stop = field_names.index(stop) + 1   # stop field is inclusive
                name = slice(start, stop, step)
            for field in self._meta.fields[name]:
                sequence.append(field)
            if len(sequence) != len(value):
                raise DbfError("length of slices not equal")
            for field, val in zip(sequence, value):
                self[field] = val
        else:
            raise TypeError("%s is not a field name" % name)

    def __repr__(self):
        # NOTE(review): returns bytes, but __repr__ must return str --
        # calling repr() on a template raises TypeError on Python 3
        # (compare Record.__repr__, which decodes via latin1)
        return self._data.tobytes()

    def __str__(self):
        result = []
        for seq, field in enumerate(field_names(self)):
            result.append("%3d - %-10s: %r" % (seq, field, self[field]))
        return '\n'.join(result)
class RecordVaporWare(object):
    """
    Provides routines to mimic a dbf record, but all values are non-existent.
    """

    # _recno: -1 for a 'bof' vapor record, None for 'eof'
    # _sequence: the table/list the vapor record is positioned within
    __slots__ = ('_recno', '_sequence')

    def __new__(cls, position, sequence):
        """
        record = ascii array of entire record
        layout=record specification
        memo = memo object for table
        """
        if position not in ('bof', 'eof'):
            raise ValueError("position should be 'bof' or 'eof', not %r" % position)
        vapor = object.__new__(cls)
        # tuple-index trick: -1 marks bof, None marks eof (see _recnum)
        vapor._recno = (-1, None)[position == 'eof']
        vapor._sequence = sequence
        return vapor

    def __contains__(self, key):
        # a vapor record contains nothing
        return False

    def __eq__(self, other):
        # vapor records never equal anything, including each other
        if not isinstance(other, (Record, RecordTemplate, RecordVaporWare, dict, tuple)):
            return NotImplemented
        return False

    def __getattr__(self, name):
        # any non-dunder attribute access yields the Vapor sentinel
        if name[0:2] == '__' and name[-2:] == '__':
            raise AttributeError('Method %s is not implemented.' % name)
        else:
            return Vapor

    def __getitem__(self, item):
        if isinstance(item, baseinteger):
            return Vapor
        elif isinstance(item, slice):
            raise TypeError('slice notation not allowed on Vapor records')
        elif isinstance(item, basestring):
            return self.__getattr__(item)
        else:
            raise TypeError("%r is not a field name" % item)

    def __len__(self):
        raise TypeError("Vapor records have no length")

    def __ne__(self, other):
        if not isinstance(other, (Record, RecordTemplate, RecordVaporWare, dict, tuple)):
            return NotImplemented
        return True

    def __bool__(self):
        """
        Vapor records are always False
        """
        return False

    def __setattr__(self, name, value):
        # only the slot attributes may be set (during __new__); everything
        # else is refused -- vapor records are immutable
        if name in self.__slots__:
            object.__setattr__(self, name, value)
            return
        raise TypeError("cannot change Vapor record")

    def __setitem__(self, name, value):
        if isinstance(name, (basestring, baseinteger)):
            raise TypeError("cannot change Vapor record")
        elif isinstance(name, slice):
            raise TypeError("slice notation not allowed on Vapor records")
        else:
            raise TypeError("%s is not a field name" % name)

    def __repr__(self):
        # recno(self) is None exactly for 'eof' vapor records
        return "RecordVaporWare(position=%r, sequence=%r)" % (('bof', 'eof')[recno(self) is None], self._sequence)

    def __str__(self):
        return 'VaporRecord(%r)' % recno(self)

    @property
    def _recnum(self):
        # eof vapor records report the index just past the end of the
        # sequence; bof vapor records report -1
        if self._recno is None:
            return len(self._sequence)
        else:
            return self._recno
3094
class _DbfMemo(object):
    """
    Provides access to memo fields as dictionaries
    Must override _init, _get_memo, and _put_memo to
    store memo contents to disk
    """

    def _init(self):
        """
        Initialize disk file usage
        """

    def _get_memo(self, block):
        """
        Retrieve memo contents from disk
        """

    def _put_memo(self, data):
        """
        Store memo contents to disk
        """

    def _zap(self):
        """
        Resets memo structure back to zero memos
        """
        self.memory.clear()
        self.nextmemo = 1

    def __init__(self, meta):
        # in-memory store plus a next-free-block counter; subclasses hook
        # _init() to set up the on-disk file
        self.meta = meta
        self.memory = {}
        self.nextmemo = 1
        self._init()
        self.meta.newmemofile = False

    def get_memo(self, block):
        """
        Gets the memo in block
        """
        # block 0 (or ignored memos) means "no memo"
        if self.meta.ignorememos or not block:
            return ''
        if self.meta.location == ON_DISK:
            return self._get_memo(block)
        return self.memory[block]

    def put_memo(self, data):
        """
        Stores data in memo file, returns block number
        """
        # empty data (or ignored memos) stores nothing: block 0
        if self.meta.ignorememos or data == '':
            return 0
        if self.meta.location != IN_MEMORY:
            return self._put_memo(data)
        slot = self.nextmemo
        self.nextmemo += 1
        self.memory[slot] = data
        return slot
3156
class _Db3Memo(_DbfMemo):
    """
    dBase III specific memo handler (.dbt file).

    Memos live in fixed 512-byte blocks; each memo is terminated by two
    ^Z (0x1a) bytes.  The first four bytes of the file hold the next
    available block number.
    """

    def _init(self):
        """
        Open (or create) the on-disk memo file and read the next-block pointer.
        """
        self.meta.memo_size = 512
        self.record_header_length = 2
        if self.meta.location == ON_DISK and not self.meta.ignorememos:
            if self.meta.newmemofile:
                # fresh file: next free block is 1; pad header to 512 bytes
                self.meta.mfd = open(self.meta.memoname, 'w+b')
                self.meta.mfd.write(pack_long_int(1) + b'\x00' * 508)
            else:
                try:
                    self.meta.mfd = open(self.meta.memoname, 'r+b')
                    self.meta.mfd.seek(0)
                    next = self.meta.mfd.read(4)
                    self.nextmemo = unpack_long_int(next)
                except Exception:
                    exc = sys.exc_info()[1]
                    raise DbfError("memo file appears to be corrupt: %r" % exc.args) from None

    def _get_memo(self, block):
        """
        Read the memo stored at `block`, stopping at the ^Z^Z terminator.
        Returns whatever was read if EOF is hit before a terminator.
        """
        block = int(block)
        self.meta.mfd.seek(block * self.meta.memo_size)
        eom = -1
        data = b''
        while eom == -1:
            newdata = self.meta.mfd.read(self.meta.memo_size)
            if not newdata:
                return data
            data += newdata
            eom = data.find(b'\x1a\x1a')
        return data[:eom]

    def _put_memo(self, data):
        """
        Append `data` as a new memo and return the block number used.
        Verifies the write by reading the memo back; raises DbfError on
        mismatch (after dumping diagnostics to dbf_memo_dump.err).
        """
        length = len(data) + self.record_header_length  # room for two ^Z at end of memo
        blocks = length // self.meta.memo_size
        if length % self.meta.memo_size:
            blocks += 1
        thismemo = self.nextmemo
        self.nextmemo = thismemo + blocks
        # update the next-free-block pointer, then write the memo itself
        self.meta.mfd.seek(0)
        self.meta.mfd.write(pack_long_int(self.nextmemo))
        self.meta.mfd.seek(thismemo * self.meta.memo_size)
        self.meta.mfd.write(data)
        self.meta.mfd.write(b'\x1a\x1a')
        double_check = self._get_memo(thismemo)
        if len(double_check) != len(data):
            # BUGFIX: the dump file is opened in binary mode, so the string
            # diagnostics must be encoded -- previously these writes raised
            # TypeError, masking the diagnostic dump entirely.  The file is
            # also now closed reliably via `with`.
            with open('dbf_memo_dump.err', 'wb') as uhoh:
                uhoh.write(('thismemo: %d\n' % thismemo).encode('ascii'))
                uhoh.write(('nextmemo: %d\n' % self.nextmemo).encode('ascii'))
                uhoh.write(('saved: %d bytes\n' % len(data)).encode('ascii'))
                uhoh.write(data)
                uhoh.write(('\nretrieved: %d bytes\n' % len(double_check)).encode('ascii'))
                uhoh.write(double_check)
            raise DbfError("unknown error: memo not saved")
        return thismemo

    def _zap(self):
        """
        Truncate the memo file back to an empty header (no memos).
        """
        # NOTE(review): self.nextmemo is not reset to 1 here, unlike the
        # base-class _zap -- confirm callers reset it elsewhere
        if self.meta.location == ON_DISK and not self.meta.ignorememos:
            mfd = self.meta.mfd
            mfd.seek(0)
            mfd.truncate(0)
            mfd.write(pack_long_int(1) + b'\x00' * 508)
            mfd.flush()
3226
class _VfpMemo(_DbfMemo):
    """
    Visual Foxpro 6 specific
    """

    # FPT file layout: bytes 0-3 hold the next free block (big-endian),
    # bytes 6-7 hold the block size (big-endian); the header is 512 bytes

    def _init(self):
        # open (or create) the .fpt file and read/write its header
        if self.meta.location == ON_DISK and not self.meta.ignorememos:
            self.record_header_length = 8
            if self.meta.newmemofile:
                if self.meta.memo_size == 0:
                    self.meta.memo_size = 1
                elif 1 < self.meta.memo_size < 33:
                    # sizes 2..32 are expressed in units of 512 bytes
                    # NOTE(review): a memo_size of exactly 1 stays a
                    # one-byte block -- confirm that is intended
                    self.meta.memo_size *= 512
                self.meta.mfd = open(self.meta.memoname, 'w+b')
                # the 512-byte header must occupy a whole number of blocks
                nextmemo = 512 // self.meta.memo_size
                if nextmemo * self.meta.memo_size < 512:
                    nextmemo += 1
                self.nextmemo = nextmemo
                self.meta.mfd.write(pack_long_int(nextmemo, bigendian=True) + b'\x00\x00' + \
                        pack_short_int(self.meta.memo_size, bigendian=True) + b'\x00' * 504)
            else:
                try:
                    self.meta.mfd = open(self.meta.memoname, 'r+b')
                    self.meta.mfd.seek(0)
                    header = self.meta.mfd.read(512)
                    self.nextmemo = unpack_long_int(header[:4], bigendian=True)
                    self.meta.memo_size = unpack_short_int(header[6:8], bigendian=True)
                except Exception:
                    exc = sys.exc_info()[1]
                    raise DbfError("memo file appears to be corrupt: %r" % exc.args) from None

    def _get_memo(self, block):
        # each memo starts with an 8-byte header: 4-byte type + 4-byte
        # length, both big-endian
        self.meta.mfd.seek(block * self.meta.memo_size)
        header = self.meta.mfd.read(8)
        length = unpack_long_int(header[4:], bigendian=True)
        return self.meta.mfd.read(length)

    def _put_memo(self, data):
        data = data
        # the current next-free-block pointer is where this memo goes
        self.meta.mfd.seek(0)
        thismemo = unpack_long_int(self.meta.mfd.read(4), bigendian=True)
        self.meta.mfd.seek(0)
        length = len(data) + self.record_header_length
        blocks = length // self.meta.memo_size
        if length % self.meta.memo_size:
            blocks += 1
        # advance the next-free-block pointer past this memo
        self.meta.mfd.write(pack_long_int(thismemo + blocks, bigendian=True))
        self.meta.mfd.seek(thismemo * self.meta.memo_size)
        # type 0x00000001 marks a text memo
        self.meta.mfd.write(b'\x00\x00\x00\x01' + pack_long_int(len(data), bigendian=True) + data)
        return thismemo

    def _zap(self):
        # truncate back to a bare header, resetting the next-free pointer
        if self.meta.location == ON_DISK and not self.meta.ignorememos:
            mfd = self.meta.mfd
            mfd.seek(0)
            mfd.truncate(0)
            nextmemo = 512 // self.meta.memo_size
            if nextmemo * self.meta.memo_size < 512:
                nextmemo += 1
            self.nextmemo = nextmemo
            mfd.write(pack_long_int(nextmemo, bigendian=True) + b'\x00\x00' + \
                    pack_short_int(self.meta.memo_size, bigendian=True) + b'\x00' * 504)
            mfd.flush()
3290
class DbfCsv(csv.Dialect):
    """
    csv format for exporting tables

    Registered below under the dialect name 'dbf'.
    """
    delimiter = ','
    doublequote = True
    escapechar = None
    lineterminator = '\n'
    quotechar = '"'
    skipinitialspace = True
    # quote everything that is not a number, so round-tripping preserves types
    quoting = csv.QUOTE_NONNUMERIC
csv.register_dialect('dbf', DbfCsv)
class _DeadObject(object):
    """
    used because you cannot weakref None

    Falsy sentinel standing in for a collected referent.
    """

    def __bool__(self):
        return False

# replace the class with a singleton instance; only its truthiness is used
_DeadObject = _DeadObject()


# Routines for saving, retrieving, and creating fields

# offset added to a date's proleptic ordinal to get VFP's on-disk day number
# (presumably the Julian-day offset of the ordinal epoch -- TODO confirm)
VFPTIME = 1721425
def pack_short_int(value, bigendian=False):
    """
    Pack *value* into two bytes (unsigned); raises DataOverflowError
    when it will not fit.
    """
    # 256 / 65,536
    if value > 65535:
        raise DataOverflowError("Maximum Integer size exceeded.  Possible: 65535.  Attempted: %d" % value)
    layout = '>H' if bigendian else '<H'
    return struct.pack(layout, value)
3332
def pack_long_int(value, bigendian=False):
    """
    Pack *value* into four bytes (unsigned); raises DataOverflowError
    when it will not fit.
    """
    # 256 / 65,536 / 16,777,216
    if value > 4294967295:
        raise DataOverflowError("Maximum Integer size exceeded.  Possible: 4294967295.  Attempted: %d" % value)
    layout = '>L' if bigendian else '<L'
    return struct.pack(layout, value)
3344
def pack_str(string):
    """
    Return *string* upper-cased and null-padded to 11 bytes (the on-disk
    field-name format); raises DbfError when longer than 10 bytes.
    """
    if len(string) > 10:
        raise DbfError("Maximum string size is ten characters -- %s has %d characters" % (string, len(string)))
    padded = struct.pack('11s', string.upper())
    return padded
3353
def unpack_short_int(bytes, bigendian=False):
    """
    Return the unsigned integer stored in the two bytes passed in.
    """
    layout = '>H' if bigendian else '<H'
    return struct.unpack(layout, bytes)[0]
3362
def unpack_long_int(bytes, bigendian=False):
    """
    Return the unsigned integer stored in the four bytes passed in.
    """
    layout = '>L' if bigendian else '<L'
    return int(struct.unpack(layout, bytes)[0])
3371
def unpack_str(chars):
    """
    Return a lower-cased bytes string from a null-padded byte string
    (everything up to, but not including, the first NULL byte).
    """
    field = struct.unpack('%ds' % len(chars), chars)[0]
    name = bytearray()
    for ch in field:
        if ch == NULL:
            break
        name.append(ch)
    return bytes(name).lower()
3383
def scinot(value, decimals):
    """
    return scientific notation with not more than decimals-1 decimal places
    """
    value = str(value)
    # strip and remember a leading sign ('+' is dropped entirely)
    sign = ''
    if value[0] in ('+-'):
        sign = value[0]
        if sign == '+':
            sign = ''
        value = value[1:]
    if 'e' in value:    #7.8e-05
        # already scientific: just trim the mantissa if it is too long
        e = value.find('e')
        if e - 1 <= decimals:
            return sign + value
        integer, mantissa, power = value[0], value[1:e], value[e+1:]
        mantissa = mantissa[:decimals]
        value = sign + integer + mantissa + 'e' + power
        return value
    integer, mantissa = value[0], value[1:]
    if integer == '0':
        # value < 1: scan past the leading '.'/'0' run to find the first
        # significant digit; its offset becomes the (negative) exponent
        for e, integer in enumerate(mantissa):
            if integer not in ('.0'):
                break
        mantissa = '.' + mantissa[e+1:]
        mantissa = mantissa[:decimals]
        value = sign + integer + mantissa + 'e-%03d' % e
        return value
    # value >= 1: exponent is the position of the decimal point
    e = mantissa.find('.')
    mantissa = '.' + mantissa.replace('.','')
    mantissa = mantissa[:decimals]
    value = sign + integer + mantissa + 'e+%03d' % e
    return value
3417
def unsupported_type(something, *ignore):
    """
    Stand-in retrieve/update routine for data types a table style does
    not support; hands the value back untouched.
    """
    return something
3423
def retrieve_character(bytes, fielddef, memo, decoder):
    """
    Returns the string in bytes as fielddef[CLASS] or fielddef[EMPTY]

    bytes    -- buffer slice of the record's raw field data
    fielddef -- field definition tuple (indexed by CLASS, EMPTY, FLAGS, ...)
    decoder  -- codec call turning table-encoded bytes into unicode
    """
    data = bytes.tobytes()
    # binary character fields are handed back raw, undecoded
    if fielddef[FLAGS] & BINARY:
        return data
    data = fielddef[CLASS](decoder(data)[0])
    if not data.strip():
        # all-blank value -> the field's configured "empty" representation
        cls = fielddef[EMPTY]
        if cls is NoneType:
            return None
        return cls(data)
    return data
3438
def update_character(string, fielddef, memo, decoder, encoder):
    """
    returns the string as bytes (not unicode) as fielddef[CLASS] or fielddef[EMPTY]

    None stores as a field-width run of blanks; binary fields must already
    be bytes; text fields are stripped and encoded with the table's codec.
    """
    length = fielddef[LENGTH]
    if string == None:
        return length * b' '
    if fielddef[FLAGS] & BINARY:
        if not isinstance(string, bytes):
            raise ValueError('binary field: %r not in bytes format' % string)
        return string
    else:
        if not isinstance(string, basestring):
            raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
        # NOTE(review): the encoded result is not padded to field width
        # here -- presumably the caller pads; confirm before relying on
        # the returned length
        string = encoder(string.strip())[0]
        return string
3455
def retrieve_currency(bytes, fielddef, *ignore):
    """
    Returns the currency value in bytes

    Stored on disk as a little-endian signed 64-bit integer scaled by
    10,000; "%de-4" re-introduces the four implied decimal places before
    handing the text to fielddef[CLASS].
    """
    value = struct.unpack('<q', bytes)[0]
    return fielddef[CLASS](("%de-4" % value).strip())
3462
def update_currency(value, *ignore):
    """
    Returns the value to be stored in the record's disk data

    Currency is stored as a little-endian signed 64-bit integer scaled by
    10,000 (four implied decimal places); None stores as 0.  Raises
    DataOverflowError when the scaled value cannot fit in 64 bits.
    """
    if value == None:
        value = 0
    currency = int(value * 10000)
    # inclusive bounds over the full signed 64-bit range; the original
    # strict < test wrongly rejected -2**63, which struct '<q' can pack
    if not -9223372036854775808 <= currency <= 9223372036854775807:
        raise DataOverflowError("value %s is out of bounds" % value)
    return struct.pack('<q', currency)
3473
def retrieve_date(bytes, fielddef, *ignore):
    """
    Returns the ascii coded date as fielddef[CLASS] or fielddef[EMPTY]

    On-disk layout is eight ASCII digits, yyyymmdd; all blanks means
    "no date".
    """
    text = bytes.tobytes()
    if text == b'        ':
        cls = fielddef[EMPTY]
        if cls is NoneType:
            return None
        return cls()
    year = int(text[0:4])
    month = int(text[4:6])
    day = int(text[6:8])
    return fielddef[CLASS](year, month, day)
3488
def update_date(moment, *ignore):
    """
    Returns the Date or datetime.date object ascii-encoded (yyyymmdd);
    None stores as eight blanks.
    """
    if moment == None:
        return b'        '
    yyyymmdd = "%04d%02d%02d" % moment.timetuple()[:3]
    return yyyymmdd.encode('ascii')
3496
def retrieve_double(bytes, fielddef, *ignore):
    """
    Returns the double in bytes as fielddef[CLASS] ('default' == float).
    """
    cls = fielddef[CLASS]
    if cls == 'default':
        cls = float
    return cls(struct.unpack('<d', bytes)[0])
3505
def update_double(value, *ignore):
    """
    returns the value to be stored in the record's disk data
    (little-endian 8-byte float; None stores as 0.0)
    """
    if value == None:
        value = 0
    packed = struct.pack('<d', float(value))
    return packed
3513
def retrieve_integer(bytes, fielddef, *ignore):
    """
    Returns the little-endian binary number stored in bytes as
    fielddef[CLASS] ('default' == int).
    """
    cls = fielddef[CLASS]
    if cls == 'default':
        cls = int
    return cls(struct.unpack('<i', bytes)[0])
3523
def update_integer(value, *ignore):
    """
    Returns value in little-endian binary format

    None stores as 0.  Raises DbfError when the value cannot be converted
    to int, DataOverflowError when it is outside the signed 32-bit range.
    """
    if value == None:
        value = 0
    try:
        value = int(value)
    except Exception:
        raise DbfError("incompatible type: %s(%s)" % (type(value), value)) from None
    # inclusive bounds: the original strict < test wrongly rejected the two
    # packable extremes -2147483648 and 2147483647, contradicting its own
    # error message
    if not -2147483648 <= value <= 2147483647:
        raise DataOverflowError("Integer size exceeded.  Possible: -2,147,483,648..+2,147,483,647.  Attempted: %d" % value)
    return struct.pack('<i', value)
3537
def retrieve_logical(bytes, fielddef, *ignore):
    """
    Returns True if bytes is 't', 'T', 'y', or 'Y'
        None if '?'
        False otherwise

    Any other byte is None when LOGICAL_BAD_IS_NONE is set, else raises
    BadDataError.
    """
    cls = fielddef[CLASS]
    empty = fielddef[EMPTY]
    bytes = bytes.tobytes()
    if bytes in b'tTyY':
        return cls(True)
    elif bytes in b'fFnN':
        return cls(False)
    elif bytes in b'? ':
        if empty is NoneType:
            return None
        return empty()
    elif LOGICAL_BAD_IS_NONE:
        return None
    else:
        raise BadDataError('Logical field contained %r' % bytes)
    # (removed the original trailing `return typ(bytes)` -- it was
    #  unreachable and `typ` was never defined)
3560
def update_logical(data, *ignore):
    """
    Returns 'T' if logical is True, 'F' if False, '?' otherwise

    The Unknown/Null/Other sentinels and None all store as the
    unknown marker '?'.
    """
    if data is Unknown or data is None or data is Null or data is Other:
        return b'?'
    if data == True:
        return b'T'
    if data == False:
        return b'F'
    raise ValueError("unable to automatically coerce %r to Logical" % data)
3572
def retrieve_memo(bytes, fielddef, memo, decoder):
    """
    Returns the block of data from a memo file

    The record stores the memo's block number as ASCII digits; an
    all-blank field (or memos being ignored) yields the field's empty
    value.
    """
    stringval = bytes.tobytes().strip()
    if not stringval or memo is None:
        cls = fielddef[EMPTY]
        if cls is NoneType:
            return None
        return cls()
    block = int(stringval)
    data = memo.get_memo(block)
    # binary memos are returned raw, undecoded
    if fielddef[FLAGS] & BINARY:
        return data
    return fielddef[CLASS](decoder(data)[0])
3588
def update_memo(string, fielddef, memo, decoder, encoder):
    """
    Writes string as a memo, returns the block number it was saved into
    (right-justified ASCII digits, field width wide)
    """
    if memo is None:
        raise DbfError('Memos are being ignored, unable to update')
    if fielddef[FLAGS] & BINARY:
        if string == None:
            string = b''
        if not isinstance(string, bytes):
            raise ValueError('binary field: %r not in bytes format' % string)
    else:
        if string == None:
            string = ''
        if not isinstance(string, basestring):
            raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
        string = encoder(string)[0]
    block = memo.put_memo(string)
    if block == 0:
        # block 0 means "no memo" -- store blanks instead of a zero
        block = b''
    return ("%*s" % (fielddef[LENGTH], block)).encode('ascii')
3610
def retrieve_numeric(bytes, fielddef, *ignore):
    """
    Returns the number stored in bytes as integer if field spec for
    decimals is 0, float otherwise
    """
    # strip NUL padding and surrounding blanks from the ASCII digits
    string = bytes.tobytes().replace(b'\x00', b'').strip()
    cls = fielddef[CLASS]
    if not string or string[0:1] == b'*':  # value too big to store (Visual FoxPro idiocy)
        cls = fielddef[EMPTY]
        if cls is NoneType:
            return None
        return cls()
    if cls == 'default':
        if fielddef[DECIMALS] == 0:
            return int(string)
        else:
            return float(string)
    else:
        return cls(string)
3630
def update_numeric(value, fielddef, *ignore):
    """
    returns value as ascii representation, rounding decimal
    portion as necessary

    None stores as a field-width run of blanks; raises DbfError for
    non-numeric input and DataOverflowError when the integer portion
    cannot fit.
    """
    if value == None:
        return fielddef[LENGTH] * b' '
    try:
        value = float(value)
    except Exception:
        raise DbfError("incompatible type: %s(%s)" % (type(value), value)) from None
    decimalsize = fielddef[DECIMALS]
    totalsize = fielddef[LENGTH]
    if decimalsize:
        decimalsize += 1        # +1 for the decimal point itself
    maxintegersize = totalsize - decimalsize
    integersize = len("%.0f" % floor(value))
    if integersize > maxintegersize:
        if integersize != 1:
            raise DataOverflowError('Integer portion too big')
        # NOTE(review): this scientific-notation fallback is computed and
        # length-checked but never returned -- the final "%*.*f" below is
        # used regardless; looks like a latent bug, confirm intent
        string = scinot(value, decimalsize)
        if len(string) > totalsize:
            raise DataOverflowError('Value representation too long for field')
    return ("%*.*f" % (fielddef[LENGTH], fielddef[DECIMALS], value)).encode('ascii')
3655
def retrieve_vfp_datetime(bytes, fielddef, *ignore):
    """
    returns the date/time stored in bytes; dates <= 01/01/1981 00:00:00
    may not be accurate; BC dates are nulled.
    """
    # two four-byte integers store the date and time.
    # milliseconds are discarded from time
    if bytes == array('B', [0] * 8):
        # all-zero field -> the configured "empty" representation
        cls = fielddef[EMPTY]
        if cls is NoneType:
            return None
        return cls()
    cls = fielddef[CLASS]
    time = unpack_long_int(bytes[4:])
    microseconds = (time % 1000) * 1000
    time = time // 1000                      # int(round(time, -3)) // 1000 discard milliseconds
    hours = time // 3600
    mins = time % 3600 // 60
    secs = time % 3600 % 60
    time = datetime.time(hours, mins, secs, microseconds)
    possible = unpack_long_int(bytes[:4])
    possible -= VFPTIME
    # NOTE(review): clamping to 0 still reaches fromordinal(0), which
    # raises ValueError -- confirm BC/too-early dates are really handled
    possible = max(0, possible)
    date = datetime.date.fromordinal(possible)
    return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond)
3681
def update_vfp_datetime(moment, *ignore):
    """
    Sets the date/time stored in moment
    moment must have fields:
        year, month, day, hour, minute, second, microsecond

    Stored as two little-endian longs: day number (ordinal + VFPTIME),
    then milliseconds since midnight; a false moment stores eight zero
    bytes.
    """
    data = [0] * 8
    if moment:
        hour = moment.hour
        minute = moment.minute
        second = moment.second
        millisecond = moment.microsecond // 1000       # convert from millionths to thousandths
        time = ((hour * 3600) + (minute * 60) + second) * 1000 + millisecond
        data[4:] = update_integer(time)
        data[:4] = update_integer(moment.toordinal() + VFPTIME)
    return bytes(data)
3698
def retrieve_vfp_memo(bytes, fielddef, memo, decoder):
    """
    Returns the block of data from a memo file

    The record stores the block number as a little-endian 4-byte int;
    block 0 (or memos being ignored) yields the field's empty value.
    """
    if memo is None:
        block = 0
    else:
        block = struct.unpack('<i', bytes)[0]
    if not block:
        cls = fielddef[EMPTY]
        if cls is NoneType:
            return None
        return cls()
    data = memo.get_memo(block)
    # binary memos are returned raw, undecoded
    if fielddef[FLAGS] & BINARY:
        return data
    return fielddef[CLASS](decoder(data)[0])
3716
def update_vfp_memo(string, fielddef, memo, decoder, encoder):
    """
    Writes string as a memo, returns the block number it was saved into
    (packed as a little-endian 4-byte int; None stores block 0 = no memo)
    """
    if memo is None:
        raise DbfError('Memos are being ignored, unable to update')
    if string == None:
        return struct.pack('<i', 0)
    if fielddef[FLAGS] & BINARY:
        if not isinstance(string, bytes):
            raise ValueError('binary field: %r not in bytes format' % string)
        string = bytes(string)
    else:
        if not isinstance(string, basestring):
            raise ValueError("unable to coerce %r(%r) to string" % (type(string), string))
        string = encoder(string)[0]
    block = memo.put_memo(string)
    return struct.pack('<i', block)
3739
def add_character(format, flags):
    """
    Parse a Character field specification -- 'C(n)' plus optional flags.
    Returns (length, decimals, flag); raises FieldSpecError on a bad spec.
    """
    spec, options = format[0], format[1:]
    bad_spec = spec[0] != '(' or spec[-1] != ')'
    bad_options = any([opt not in flags for opt in options])
    if bad_spec or bad_options:
        raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
    length = int(spec[1:-1])
    if not 0 < length < 256:
        raise FieldSpecError("Character fields must be between 1 and 255, not %d" % length)
    flag = 0
    for opt in options:
        flag |= FieldFlag.lookup(opt)
    return length, 0, flag
3751
def add_date(format, flags):
    """
    Parse a Date field specification -- 'D' plus optional flags.
    Returns (length, decimals, flag); length is always 8.
    """
    if any([opt not in flags for opt in format]):
        raise FieldSpecError("Format for Date field creation is 'D%s', not 'D%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 8, 0, flag
3761
def add_logical(format, flags):
    """
    Parse a Logical field specification -- 'L' plus optional flags.
    Returns (length, decimals, flag); length is always 1.
    """
    if any([opt not in flags for opt in format]):
        raise FieldSpecError("Format for Logical field creation is 'L%s', not 'L%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 1, 0, flag
3771
def add_memo(format, flags):
    """
    Parse a Memo field specification -- flags only.
    Returns (length, decimals, flag); length is always 10 (the ASCII
    block-number width).
    """
    if any(opt not in flags for opt in format):
        raise FieldSpecError("Format for Memo field creation is 'M(n)%s', not 'M%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 10, 0, flag
3781
def add_binary_memo(format, flags):
    """
    Parse a binary Memo field specification -- flags only; the BINARY
    flag is always set.  Returns (length, decimals, flag); length is 10.
    """
    if any(opt not in flags for opt in format):
        raise FieldSpecError("Format for Memo field creation is 'M(n)%s', not 'M%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    # binary memos always carry the BINARY flag
    return 10, 0, flag | FieldFlag.BINARY
3792
def add_numeric(format, flags):
    """
    Parse a Numeric field specification -- 'N(s,d)'.
    Returns (length, decimals, flag); raises FieldSpecError on a bad spec
    or out-of-range sizes.
    """
    if len(format) > 1 or format[0][0] != '(' or format[0][-1] != ')' or any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags))
    length, decimals = [int(piece) for piece in format[0][1:-1].split(',')]
    flag = 0
    for opt in format[1:]:
        flag |= FieldFlag.lookup(opt)
    if not 0 < length < 20:
        raise FieldSpecError("Numeric fields must be between 1 and 19 digits, not %d" % length)
    if decimals and not 0 < decimals <= length - 2:
        raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
    return length, decimals, flag
3807
def add_clp_character(format, flags):
    """
    Parse a Clipper Character field specification -- 'C(n)' plus optional
    flags; Clipper allows widths up to 65,518.  Returns
    (length, decimals, flag).
    """
    spec, options = format[0], format[1:]
    bad_spec = spec[0] != '(' or spec[-1] != ')'
    bad_options = any([opt not in flags for opt in options])
    if bad_spec or bad_options:
        raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
    length = int(spec[1:-1])
    if not 0 < length < 65519:
        raise FieldSpecError("Character fields must be between 1 and 65,519")
    flag = 0
    for opt in options:
        flag |= FieldFlag.lookup(opt)
    return length, 0, flag
3819
def add_vfp_character(format, flags):
    """
    Parse a Visual FoxPro Character field specification -- 'C(n)' plus
    optional flags.  Returns (length, decimals, flag).

    NOTE(review): the bound check allows at most 254, while the error
    message says 255 -- confirm which is intended before changing either.
    """
    spec, options = format[0], format[1:]
    bad_spec = spec[0] != '(' or spec[-1] != ')'
    bad_options = any([opt not in flags for opt in options])
    if bad_spec or bad_options:
        raise FieldSpecError("Format for Character field creation is 'C(n)%s', not 'C%s'" % field_spec_error_text(format, flags))
    length = int(spec[1:-1])
    if not 0 < length < 255:
        raise FieldSpecError("Character fields must be between 1 and 255")
    flag = 0
    for opt in options:
        flag |= FieldFlag.lookup(opt)
    return length, 0, flag
3831
def add_vfp_currency(format, flags):
    """
    Parse a Currency field specification -- 'Y' plus optional flags.
    Returns (length, decimals, flag); length is always 8.
    """
    if any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Currency field creation is 'Y%s', not 'Y%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 8, 0, flag
3841
def add_vfp_datetime(format, flags):
    """
    Parse a DateTime field specification -- 'T' plus optional flags.
    Returns (length, decimals, flag); length is always 8.
    """
    if any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for DateTime field creation is 'T%s', not 'T%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 8, 0, flag
3851
def add_vfp_double(format, flags):
    """
    Parse a Double field specification -- 'B' plus optional flags.
    Returns (length, decimals, flag); length is always 8.
    """
    if any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Double field creation is 'B%s', not 'B%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 8, 0, flag
3861
def add_vfp_integer(format, flags):
    """
    Parse an Integer field specification -- 'I' plus optional flags.
    Returns (length, decimals, flag); length is always 4.
    """
    if any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Integer field creation is 'I%s', not 'I%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    return 4, 0, flag
3871
def add_vfp_memo(format, flags):
    """
    Parse a VFP Memo field specification.  Returns (length, decimals,
    flag); length is always 4 (the binary block-number width).  BINARY is
    implied for table styles that do not expose a 'binary' flag
    (general / picture fields).
    """
    if any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Memo field creation is 'M%s', not 'M%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    if 'binary' not in flags:   # general or picture -- binary is implied
        flag |= FieldFlag.BINARY
    return 4, 0, flag
3883
def add_vfp_binary_memo(format, flags):
    """
    Parse a binary VFP Memo field specification; the BINARY flag is
    always set.  Returns (length, decimals, flag); length is always 4.
    """
    if any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Memo field creation is 'M%s', not 'M%s'" % field_spec_error_text(format, flags))
    flag = 0
    for opt in format:
        flag |= FieldFlag.lookup(opt)
    # general or picture -- binary is implied
    return 4, 0, flag | FieldFlag.BINARY
3895
def add_vfp_numeric(format, flags):
    """
    Parse a VFP Numeric field specification -- 'N(s,d)' plus optional
    flags.  Returns (length, decimals, flag); raises FieldSpecError on a
    bad spec or out-of-range sizes.
    """
    if format[0][0] != '(' or format[0][-1] != ')' or any(opt not in flags for opt in format[1:]):
        raise FieldSpecError("Format for Numeric field creation is 'N(s,d)%s', not 'N%s'" % field_spec_error_text(format, flags))
    length, decimals = [int(piece) for piece in format[0][1:-1].split(',')]
    flag = 0
    for opt in format[1:]:
        flag |= FieldFlag.lookup(opt)
    if not 0 < length < 21:
        raise FieldSpecError("Numeric fields must be between 1 and 20 digits, not %d" % length)
    if decimals and not 0 < decimals <= length - 2:
        raise FieldSpecError("Decimals must be between 0 and Length-2 (Length: %d, Decimals: %d)" % (length, decimals))
    return length, decimals, flag
3910
def field_spec_error_text(format, flags):
    """
    Generic helper building the (flags-text, format-text) pair used in
    the add_...() routines' error messages.
    """
    flag_text = ' [ ' + ' | '.join(flags) + ' ]' if flags else ''
    format_text = ' ' + ' '.join(format) if format else ''
    return flag_text, format_text
3922
def ezip(*iters):
    """
    Zip that pads to the longest iterable, repeating each exhausted
    iterable's final value as needed.
    """
    iterators = [iter(seq) for seq in iters]
    current = [None] * len(iterators)
    while True:
        remaining = len(iterators)
        for position, iterator in enumerate(iterators):
            try:
                current[position] = next(iterator)
            except StopIteration:
                remaining -= 1
        if not remaining:
            # every iterator is exhausted (also handles zero arguments)
            return
        yield tuple(current)
3942
# Public classes

class Tables(object):
    """
    context manager for multiple tables and/or indices

    Accepts Table objects and/or table names (opened on the fly), or a
    single iterable of them; enters all on __enter__ and closes them in
    reverse order on __exit__.
    """

    def __init__(self, *tables):
        # a single argument that is neither a Table nor a name is taken
        # as an iterable of tables/names
        if len(tables) == 1 and not isinstance(tables[0], (Table, basestring)):
            tables = tables[0]
        self._tables = []
        self._entered = []
        for item in tables:
            if isinstance(item, basestring):
                item = Table(item)
            self._tables.append(item)

    def __enter__(self):
        for table in self._tables:
            table.__enter__()
            self._entered.append(table)
        return tuple(self._tables)

    def __exit__(self, *args):
        # close in reverse order; ignore errors so every table gets a chance
        while self._entered:
            table = self._entered.pop()
            try:
                table.__exit__()
            except Exception:
                pass
3971
class IndexLocation(int):
    """
    An int marking where the match criteria is (when found is True),
    or where it would be inserted (when found is False).

    Used by Index.index_search
    """

    def __new__(cls, value, found):
        "value is the number, found is True/False"
        location = int.__new__(cls, value)
        location.found = found
        return location

    def __bool__(self):
        # truthiness reports whether the search succeeded, not the position
        return self.found
3988
class FieldInfo(tuple):
    """
    Immutable 4-tuple with named attributes describing a field:
    dbf type, length, decimal portion, and python class.
    """

    __slots__ = ()

    def __new__(cls, *args):
        # exactly four positional values are required
        if len(args) != 4:
            raise TypeError("%s should be called with Type, Length, Decimal size, and Class" % cls.__name__)
        return tuple.__new__(cls, args)

    @property
    def field_type(self):
        "dbf type code"
        return self[0]

    @property
    def length(self):
        "field width in the table"
        return self[1]

    @property
    def decimal(self):
        "number of decimal places"
        return self[2]

    @property
    def py_type(self):
        "python class used for the field's values"
        return self[3]
4018
class CodePage(tuple):
    """
    Immutable (name, description, code) triple describing a table's
    codepage.
    """

    __slots__ = ()

    def __new__(cls, name):
        "call with name of codepage (e.g. 'cp1252')"
        code, name, desc = _codepage_lookup(name)
        return tuple.__new__(cls, (name, desc, code))

    def __repr__(self):
        return "CodePage(%r, %r, %r)" % tuple(self)

    def __str__(self):
        return "%s (%s)" % tuple(self[:2])

    @property
    def name(self):
        "codec name"
        return self[0]

    @property
    def desc(self):
        "human-readable description"
        return self[1]

    @property
    def code(self):
        "numeric code stored in the table header"
        return self[2]
4049
class Iter(_Navigation):
    """
    Provides iterable behavior for a table

    Relies on the _index cursor inherited from _Navigation.
    """

    def __init__(self, table, include_vapor=False):
        """
        Return a Vapor record as the last record in the iteration
        if include_vapor is True
        """
        self._table = table
        self._record = None
        self._include_vapor = include_vapor
        self._exhausted = False

    def __iter__(self):
        return self

    def __next__(self):
        # self._index comes from _Navigation -- presumably starts before
        # the first record; TODO confirm the initial value
        while not self._exhausted:
            if self._index == len(self._table):
                break
            if self._index >= (len(self._table) - 1):
                # at (or past) the last record: optionally emit the
                # vapor (eof) record once, then stop
                self._index = max(self._index, len(self._table))
                if self._include_vapor:
                    return RecordVaporWare('eof', self._table)
                break
            self._index += 1
            record = self._table[self._index]
            return record
        self._exhausted = True
        raise StopIteration
4083
4084 4085 -class Table(_Navigation):
4086 """ 4087 Base class for dbf style tables 4088 """ 4089 4090 _version = 'basic memory table' 4091 _versionabbr = 'dbf' 4092 _max_fields = 255 4093 _max_records = 4294967296 4094 4095 @MutableDefault
4096 - def _field_types():
4097 return { 4098 CHAR: { 4099 'Type':'Character', 'Init':add_character, 'Blank':lambda x: b' ' * x, 'Retrieve':retrieve_character, 'Update':update_character, 4100 'Class':str, 'Empty':str, 'flags':tuple(), 4101 }, 4102 DATE: { 4103 'Type':'Date', 'Init':add_date, 'Blank':lambda x: b' ', 'Retrieve':retrieve_date, 'Update':update_date, 4104 'Class':datetime.date, 'Empty':none, 'flags':tuple(), 4105 }, 4106 NUMERIC: { 4107 'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric, 4108 'Class':'default', 'Empty':none, 'flags':tuple(), 4109 }, 4110 LOGICAL: { 4111 'Type':'Logical', 'Init':add_logical, 'Blank':lambda x: b'?', 'Retrieve':retrieve_logical, 'Update':update_logical, 4112 'Class':bool, 'Empty':none, 'flags':tuple(), 4113 }, 4114 MEMO: { 4115 'Type':'Memo', 'Init':add_memo, 'Blank':lambda x: b' ', 'Retrieve':retrieve_memo, 'Update':update_memo, 4116 'Class':str, 'Empty':str, 'flags':tuple(), 4117 }, 4118 NUMERIC: { 4119 'Type':'Numeric', 'Init':add_numeric, 'Blank':lambda x: b' ' * x, 'Retrieve':retrieve_numeric, 'Update':update_numeric, 4120 'Class':'default', 'Empty':none, 'flags':tuple(), 4121 }, 4122 }
4123 @MutableDefault
4124 - def _previous_status():
4125 return []
4126 _memoext = '' 4127 _memoClass = _DbfMemo 4128 _yesMemoMask = 0 4129 _noMemoMask = 0 4130 _binary_types = tuple() # as in non-unicode character, or non-text number 4131 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC) # field represented by text data 4132 _currency_types = tuple() # money! 4133 _date_types = (DATE, ) # dates 4134 _datetime_types = tuple() # dates w/times 4135 _decimal_types = (NUMERIC, FLOAT) # text-based numeric fields 4136 _fixed_types = (MEMO, DATE, LOGICAL) # always same length in table 4137 _logical_types = (LOGICAL, ) # logicals 4138 _memo_types = (MEMO, ) 4139 _numeric_types = (NUMERIC, FLOAT) # fields representing a number 4140 _variable_types = (CHAR, NUMERIC, FLOAT) # variable length in table 4141 _dbfTableHeader = array('B', [0] * 32) 4142 _dbfTableHeader[0] = 0 # table type - none 4143 _dbfTableHeader[8:10] = array('B', pack_short_int(33)) 4144 _dbfTableHeader[10] = 1 # record length -- one for delete flag 4145 _dbfTableHeader[29] = 0 # code page -- none, using plain ascii 4146 _dbfTableHeader = _dbfTableHeader.tobytes() 4147 _dbfTableHeaderExtra = b'' 4148 _supported_tables = () 4149 _pack_count = 0 4150 backup = None 4151
4152 - class _Indexen(object):
4153 """ 4154 implements the weakref structure for seperate indexes 4155 """ 4156
4157 - def __init__(self):
4158 self._indexen = set()
4159
4160 - def __iter__(self):
4161 self._indexen = set([s for s in self._indexen if s() is not None]) 4162 return (s() for s in self._indexen if s() is not None)
4163
4164 - def __len__(self):
4165 self._indexen = set([s for s in self._indexen if s() is not None]) 4166 return len(self._indexen)
4167
4168 - def add(self, new_index):
4169 self._indexen.add(weakref.ref(new_index)) 4170 self._indexen = set([s for s in self._indexen if s() is not None])
4171
4172 - class _MetaData(dict):
4173 """ 4174 Container class for storing per table metadata 4175 """ 4176 blankrecord = None 4177 dfd = None # file handle 4178 fields = None # field names 4179 field_count = 0 # number of fields 4180 field_types = None # dictionary of dbf type field specs 4181 filename = None # name of .dbf file 4182 ignorememos = False # True when memos should be ignored 4183 memoname = None # name of .dbt/.fpt file 4184 mfd = None # file handle 4185 memo = None # memo object 4186 memofields = None # field names of Memo type 4187 newmemofile = False # True when memo file needs to be created 4188 nulls = None # non-None when Nullable fields present 4189 user_fields = None # not counting SYSTEM fields 4190 user_field_count = 0 # also not counting SYSTEM fields
4191
    class _TableHeader(object):
        """
        represents the data block that defines a tables type and layout

        Wraps the 32-byte dbf header (plus the field-descriptor block and
        terminating CR) in a mutable array('B') and exposes the packed
        values as properties.
        """

        def __init__(self, data, pack_date, unpack_date):
            # data: the raw 32-byte header; pack_date/unpack_date: the
            # table class's 3-byte date converters (stored per instance)
            if len(data) != 32:
                raise BadDataError('table header should be 32 bytes, but is %d bytes' % len(data))
            self.packDate = pack_date
            self.unpackDate = unpack_date
            # append the field-block terminator so a new header is already valid
            self._data = array('B', data + b'\x0d')

        def codepage(self, cp=None):
            """
            get/set code page of table

            With no argument returns the raw codepage byte (offset 29);
            with an argument normalizes it via _codepage_lookup and stores it.
            """
            if cp is None:
                return self._data[29]
            else:
                cp, sd, ld = _codepage_lookup(cp)
                self._data[29] = cp
                return cp

        @property
        def data(self):
            """
            main data structure

            Reading refreshes the last-update stamp (bytes 1:4) to today
            before returning the raw bytes.
            """
            date = self.packDate(Date.today())
            self._data[1:4] = array('B', date)
            return self._data.tobytes()

        @data.setter
        def data(self, bytes):
            # replaces the whole structure; must at least hold the 32-byte header
            if len(bytes) < 32:
                raise BadDataError("length for data of %d is less than 32" % len(bytes))
            self._data[:] = array('B', bytes)

        @property
        def extra(self):
            "extra dbf info (located after headers, before data records)"
            # scan the 32-byte field descriptors for the CR terminator
            fieldblock = self._data[32:]
            for i in range(len(fieldblock) // 32 + 1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            cr += 33    # skip past CR
            return self._data[cr:].tobytes()

        @extra.setter
        def extra(self, data):
            fieldblock = self._data[32:]
            for i in range(len(fieldblock) // 32 + 1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            cr += 33    # skip past CR
            self._data[cr:] = array('B', data)                              # extra
            # start-of-records offset (bytes 8:10) moves with the new total size
            self._data[8:10] = array('B', pack_short_int(len(self._data)))  # start

        @property
        def field_count(self):
            "number of fields (read-only)"
            fieldblock = self._data[32:]
            for i in range(len(fieldblock) // 32 + 1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            # each field descriptor is exactly 32 bytes
            return len(fieldblock[:cr]) // 32

        @property
        def fields(self):
            """
            field block structure
            """
            fieldblock = self._data[32:]
            for i in range(len(fieldblock) // 32 + 1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            return fieldblock[:cr].tobytes()

        @fields.setter
        def fields(self, block):
            fieldblock = self._data[32:]
            for i in range(len(fieldblock) // 32 + 1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            cr += 32    # convert to indexing main structure
            fieldlen = len(block)
            if fieldlen % 32 != 0:
                raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
            self._data[32:cr] = array('B', block)                           # fields
            self._data[8:10] = array('B', pack_short_int(len(self._data)))  # start
            # recompute the record length from each descriptor's length byte (offset 16)
            fieldlen = fieldlen // 32
            recordlen = 1                                     # deleted flag
            for i in range(fieldlen):
                recordlen += block[i*32+16]
            self._data[10:12] = array('B', pack_short_int(recordlen))

        @property
        def record_count(self):
            """
            number of records (maximum 16,777,215)
            """
            return unpack_long_int(self._data[4:8].tobytes())

        @record_count.setter
        def record_count(self, count):
            self._data[4:8] = array('B', pack_long_int(count))

        @property
        def record_length(self):
            """
            length of a record (read_only) (max of 65,535)
            """
            return unpack_short_int(self._data[10:12].tobytes())

        @record_length.setter
        def record_length(self, length):
            """
            to support Clipper large Character fields
            """
            self._data[10:12] = array('B', pack_short_int(length))

        @property
        def start(self):
            """
            starting position of first record in file (must be within first 64K)
            """
            return unpack_short_int(self._data[8:10].tobytes())

        @start.setter
        def start(self, pos):
            self._data[8:10] = array('B', pack_short_int(pos))

        @property
        def update(self):
            """
            date of last table modification (read-only)
            """
            return self.unpackDate(self._data[1:4].tobytes())

        @property
        def version(self):
            """
            dbf version
            """
            return self._data[0]

        @version.setter
        def version(self, ver):
            self._data[0] = ver
    class _Table(object):
        """
        implements the weakref table for records

        Acts as a lazy, weakly-referenced cache of Record objects: records
        are re-read from disk on demand and kept only while something else
        still references them.
        """

        def __init__(self, count, meta):
            # count: current number of records; meta: the table's _MetaData
            self._meta = meta
            self._max_count = count
            self._weakref_list = {}       # index -> weakref.ref(Record)
            self._accesses = 0
            self._dead_check = 1024       # purge dead weakrefs every N accesses

        def __getitem__(self, index):
            # maybe = self._weakref_list[index]()
            # normalize negative indices, bounds-check against record count
            if index < 0:
                if self._max_count + index < 0:
                    raise IndexError('index %d smaller than available records' % index)
                index = self._max_count + index
            if index >= self._max_count:
                raise IndexError('index %d greater than available records' % index)
            maybe = self._weakref_list.get(index)
            if maybe:
                maybe = maybe()           # dereference; None if record was collected
            # periodically sweep out weakrefs whose records have died
            self._accesses += 1
            if self._accesses >= self._dead_check:
                dead = []
                for key, value in self._weakref_list.items():
                    if value() is None:
                        dead.append(key)
                for key in dead:
                    del self._weakref_list[key]
            if not maybe:
                # cache miss: re-read the record's bytes from the data file
                meta = self._meta
                if meta.status == CLOSED:
                    raise DbfError("%s is closed; record %d is unavailable" % (meta.filename, index))
                header = meta.header
                if index < 0:
                    index += header.record_count
                size = header.record_length
                location = index * size + header.start
                meta.dfd.seek(location)
                if meta.dfd.tell() != location:
                    raise ValueError("unable to seek to offset %d in file" % location)
                # NOTE: local name shadows the builtin `bytes`
                bytes = meta.dfd.read(size)
                if not bytes:
                    raise ValueError("unable to read record data from %s at location %d" % (meta.filename, location))
                maybe = Record(recnum=index, layout=meta, kamikaze=bytes, _fromdisk=True)
                self._weakref_list[index] = weakref.ref(maybe)
            return maybe

        def append(self, record):
            # register a freshly created record under the next index
            self._weakref_list[self._max_count] = weakref.ref(record)
            self._max_count += 1

        def clear(self):
            # drop every cached weakref and reset the record count
            for key in list(self._weakref_list.keys()):
                del self._weakref_list[key]
            self._max_count = 0

        def flush(self):
            # any still-alive record that has unwritten changes is an error
            for maybe in self._weakref_list.values():
                maybe = maybe()
                if maybe and not maybe._write_to_disk:
                    raise DbfError("some records have not been written to disk")

        def pop(self):
            # remove and return the last record (materializing it first)
            if not self._max_count:
                raise IndexError('no records exist')
            record = self[self._max_count-1]
            self._max_count -= 1
            return record
4429 - def _build_header_fields(self):
4430 """ 4431 constructs fieldblock for disk table 4432 """ 4433 fieldblock = array('B', b'') 4434 memo = False 4435 nulls = False 4436 meta = self._meta 4437 header = meta.header 4438 header.version = header.version & self._noMemoMask 4439 meta.fields = [f for f in meta.fields if f != '_nullflags'] 4440 for field in meta.fields: 4441 layout = meta[field] 4442 if meta.fields.count(field) > 1: 4443 raise BadDataError("corrupted field structure (noticed in _build_header_fields)") 4444 fielddef = array('B', [0] * 32) 4445 fielddef[:11] = array('B', pack_str(meta.encoder(field)[0])) 4446 fielddef[11] = layout[TYPE] 4447 fielddef[12:16] = array('B', pack_long_int(layout[START])) 4448 fielddef[16] = layout[LENGTH] 4449 fielddef[17] = layout[DECIMALS] 4450 fielddef[18] = layout[FLAGS] 4451 fieldblock.extend(fielddef) 4452 if layout[TYPE] in meta.memo_types: 4453 memo = True 4454 if layout[FLAGS] & NULLABLE: 4455 nulls = True 4456 if memo: 4457 header.version = header.version | self._yesMemoMask 4458 if meta.memo is None: 4459 meta.memo = self._memoClass(meta) 4460 else: 4461 if os.path.exists(meta.memoname): 4462 if meta.mfd is not None: 4463 meta.mfd.close() 4464 4465 os.remove(meta.memoname) 4466 meta.memo = None 4467 if nulls: 4468 start = layout[START] + layout[LENGTH] 4469 length, one_more = divmod(len(meta.fields), 8) 4470 if one_more: 4471 length += 1 4472 fielddef = array('B', [0] * 32) 4473 fielddef[:11] = array('B', pack_str(b'_nullflags')) 4474 fielddef[11] = 0x30 4475 fielddef[12:16] = array('B', pack_long_int(start)) 4476 fielddef[16] = length 4477 fielddef[17] = 0 4478 fielddef[18] = BINARY | SYSTEM 4479 fieldblock.extend(fielddef) 4480 meta.fields.append('_nullflags') 4481 nullflags = ( 4482 _NULLFLAG, # type 4483 start, # start 4484 length, # length 4485 start + length, # end 4486 0, # decimals 4487 BINARY | SYSTEM, # flags 4488 none, # class 4489 none, # empty 4490 ) 4491 meta['_nullflags'] = nullflags 4492 header.fields = fieldblock.tobytes() 4493 
meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM] 4494 meta.user_field_count = len(meta.user_fields) 4495 Record._create_blank_data(meta)
4496
4497 - def _check_memo_integrity(self):
4498 """ 4499 checks memo file for problems 4500 """ 4501 raise NotImplementedError("_check_memo_integrity must be implemented by subclass")
4502
4503 - def _initialize_fields(self):
4504 """ 4505 builds the FieldList of names, types, and descriptions from the disk file 4506 """ 4507 raise NotImplementedError("_initialize_fields must be implemented by subclass")
4508
    def _field_layout(self, i):
        """
        Returns field information   Name Type(Length[, Decimals])

        Builds a one-line human-readable description of field number i,
        including any printable flags (used by __str__).
        """
        name = self._meta.fields[i]
        fielddef = self._meta[name]
        # NOTE: local name shadows the builtin `type`
        type = FieldType(fielddef[TYPE])
        length = fielddef[LENGTH]
        decimals = fielddef[DECIMALS]
        set_flags = fielddef[FLAGS]
        flags = []
        # GENERAL/PICTURE are implicitly binary, so BINARY is not shown for them
        if type in (GENERAL, PICTURE):
            printable_flags = NULLABLE, SYSTEM
        else:
            printable_flags = BINARY, NULLABLE, SYSTEM
        for flg in printable_flags:
            if flg & set_flags == flg:
                flags.append(FieldFlag(flg))
                set_flags &= 255 ^ flg        # clear the bit once reported
        if flags:
            flags = ' ' + ' '.join(f.text for f in flags)
        else:
            flags = ''
        # fixed types have no size spec; numeric types show length and decimals
        if type in self._fixed_types:
            description = "%s %s%s" % (name, type.symbol, flags)
        elif type in self._numeric_types:
            description = "%s %s(%d,%d)%s" % (name, type.symbol, length, decimals, flags)
        else:
            description = "%s %s(%d)%s" % (name, type.symbol, length, flags)
        return description
4539
4540 - def _list_fields(self, specs, sep=','):
4541 """ 4542 standardizes field specs 4543 """ 4544 if specs is None: 4545 specs = self.field_names 4546 elif isinstance(specs, basestring): 4547 specs = specs.strip(sep).split(sep) 4548 else: 4549 specs = list(specs) 4550 specs = [s.strip() for s in specs] 4551 return specs
4552
4553 - def _nav_check(self):
4554 """ 4555 Raises `DbfError` if table is closed 4556 """ 4557 if self._meta.status == CLOSED: 4558 raise DbfError('table %s is closed' % self.filename)
4559 4560 @staticmethod
4561 - def _pack_date(date):
4562 """ 4563 Returns a group of three bytes, in integer form, of the date 4564 """ 4565 # return "%c%c%c" % (date.year - 1900, date.month, date.day) 4566 return bytes([date.year - 1900, date.month, date.day])
4567 4568 @staticmethod
4569 - def _unpack_date(bytestr):
4570 """ 4571 Returns a Date() of the packed three-byte date passed in 4572 """ 4573 year, month, day = struct.unpack('<BBB', bytestr) 4574 year += 1900 4575 return Date(year, month, day)
4576
    def _update_disk(self, headeronly=False):
        """
        synchronizes the disk file with current data

        Rewrites the header; unless headeronly, also flushes every record
        and truncates the file to exactly the computed end-of-data.
        """
        if self._meta.location == IN_MEMORY:
            return
        meta = self._meta
        header = meta.header
        fd = meta.dfd
        fd.seek(0)
        fd.write(header.data)
        eof = header.start + header.record_count * header.record_length
        if not headeronly:
            for record in self:
                record._update_disk()
            fd.flush()
            fd.truncate(eof)
        if self._versionabbr in ('db3', 'clp'):
            # dBase III / Clipper files end with an EOF marker byte
            fd.seek(0, SEEK_END)
            fd.write(b'\x1a')        # required for dBase III compatibility
            fd.flush()
            fd.truncate(eof + 1)
4599
4600 - def __contains__(self, data):
4601 """ 4602 data can be a record, template, dict, or tuple 4603 """ 4604 if not isinstance(data, (Record, RecordTemplate, dict, tuple)): 4605 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(data)) 4606 for record in Iter(self): 4607 if data == record: 4608 return True 4609 return False
4610
4611 - def __enter__(self):
4612 self._previous_status.append(self._meta.status) 4613 self.open() 4614 return self
4615
4616 - def __exit__(self, *exc_info):
4617 if self._previous_status.pop() == CLOSED: 4618 self.close()
4619
4620 - def __getattr__(self, name):
4621 if name in ( 4622 'binary_types', 4623 'character_types', 4624 'currency_types', 4625 'date_types', 4626 'datetime_types', 4627 'decimal_types', 4628 'fixed_types', 4629 'logical_types', 4630 'memo_types', 4631 'numeric_types', 4632 'variable_types', 4633 ): 4634 return getattr(self, '_'+name) 4635 if name in ('_table', ): 4636 if self._meta.location == ON_DISK: 4637 self._table = self._Table(len(self), self._meta) 4638 else: 4639 self._table = [] 4640 return object.__getattribute__(self, name)
4641
    def __getitem__(self, value):
        """
        index with an int -> single Record; with a slice -> List of records
        """
        if isinstance(value, baseinteger):
            # allow negative indices within [-count, count)
            if not -self._meta.header.record_count <= value < self._meta.header.record_count:
                raise NotFoundError("Record %d is not in table %s." % (value, self.filename))
            return self._table[value]
        elif type(value) == slice:
            sequence = List(desc='%s -->  %s' % (self.filename, value))
            for index in range(len(self))[value]:
                record = self._table[index]
                sequence.append(record)
            return sequence
        else:
            raise TypeError('type <%s> not valid for indexing' % type(value))
4655
    def __init__(self, filename, field_specs=None, memo_size=128, ignore_memos=False,
                 codepage=None, default_data_types=None, field_data_types=None,    # e.g. 'name':str, 'age':float
                 dbf_type=None, on_disk=True,
                 ):
        """
        open/create dbf file
        filename should include path if needed
        field_specs can be either a ;-delimited string or a list of strings
        memo_size is always 512 for db3 memos
        ignore_memos is useful if the memo file is missing or corrupt
        read_only will load records into memory, then close the disk file
        keep_memos will also load any memo fields into memory
        meta_only will ignore all records, keeping only basic table information
        codepage will override whatever is set in the table itself

        NOTE(review): read_only/keep_memos/meta_only are described above but
        are not parameters of this signature -- docstring appears stale.
        The table is left CLOSED on return; callers must .open() it.
        """
        if not on_disk:
            if field_specs is None:
                raise DbfError("field list must be specified for memory tables")
        # per-table bookkeeping objects
        self._indexen = self._Indexen()
        self._meta = meta = self._MetaData()
        meta.max_fields = self._max_fields
        meta.max_records = self._max_records
        meta.table = weakref.ref(self)
        meta.filename = filename
        meta.fields = []
        meta.user_fields = []
        meta.user_field_count = 0
        # copy the class-level type tables into the metadata
        meta.fieldtypes = fieldtypes = self._field_types
        meta.fixed_types = self._fixed_types
        meta.variable_types = self._variable_types
        meta.character_types = self._character_types
        meta.currency_types = self._currency_types
        meta.decimal_types = self._decimal_types
        meta.numeric_types = self._numeric_types
        meta.memo_types = self._memo_types
        meta.ignorememos = meta.original_ignorememos = ignore_memos
        meta.memo_size = memo_size
        meta.input_decoder = codecs.getdecoder(input_decoding)      # from ascii to unicode
        meta.output_encoder = codecs.getencoder(input_decoding)     # and back to ascii
        # start from this table type's template header
        meta.header = header = self._TableHeader(self._dbfTableHeader, self._pack_date, self._unpack_date)
        header.extra = self._dbfTableHeaderExtra
        if default_data_types is None:
            default_data_types = dict()
        elif default_data_types == 'enhanced':
            # opt-in richer python types for the common field kinds
            default_data_types = {
                'C' : dbf.Char,
                'L' : dbf.Logical,
                'D' : dbf.Date,
                'T' : dbf.DateTime,
                }
        self._meta._default_data_types = default_data_types
        if field_data_types is None:
            field_data_types = dict()
        self._meta._field_data_types = field_data_types
        # install per-field-type Class/Empty/Null overrides
        for field, types in default_data_types.items():
            field = FieldType(field)
            if not isinstance(types, tuple):
                types = (types, )
            for result_name, result_type in ezip(('Class', 'Empty', 'Null'), types):
                fieldtypes[field][result_name] = result_type
        if not on_disk:
            self._table = []
            meta.location = IN_MEMORY
            meta.memoname = filename
            # touch .data so the header's update stamp is initialized
            meta.header.data
        else:
            # resolve the actual .dbf file name (case-insensitive on extension)
            base, ext = os.path.splitext(filename)
            if ext.lower() != '.dbf':
                meta.filename = filename + '.dbf'
                searchname = filename + '.[Db][Bb][Ff]'
            else:
                meta.filename = filename
                searchname = filename
            matches = glob(searchname)
            if len(matches) == 1:
                meta.filename = matches[0]
            elif matches:
                raise DbfError("please specify exactly which of %r you want" % (matches, ))
            # choose the memo file's case to match the dbf file's case
            case = [('l','u')[c.isupper()] for c in meta.filename[-4:]]
            if case == ['l','l','l','l']:
                meta.memoname = base + self._memoext.lower()
            elif case == ['l','u','u','u']:
                meta.memoname = base + self._memoext.upper()
            else:
                meta.memoname = base + ''.join([c.lower() if case[i] == 'l' else c.upper() for i, c in enumerate(self._memoext)])
            meta.location = ON_DISK
        if codepage is not None:
            # explicit codepage overrides whatever the header says
            header.codepage(codepage)
            cp, sd, ld = _codepage_lookup(codepage)
            self._meta.decoder = codecs.getdecoder(sd)
            self._meta.encoder = codecs.getencoder(sd)
        if field_specs:
            # creating a brand-new table
            if meta.location == ON_DISK:
                meta.dfd = open(meta.filename, 'w+b')
                meta.newmemofile = True
            if codepage is None:
                header.codepage(default_codepage)
                cp, sd, ld = _codepage_lookup(header.codepage())
                meta.decoder = codecs.getdecoder(sd)
                meta.encoder = codecs.getencoder(sd)
            meta.status = READ_WRITE
            self.add_fields(field_specs)
        else:
            # opening an existing table from disk
            try:
                dfd = meta.dfd = open(meta.filename, 'r+b')
            except IOError:
                e = sys.exc_info()[1]
                raise DbfError(str(e)) from None
            dfd.seek(0)
            meta.header = header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date)
            if not header.version in self._supported_tables:
                dfd.close()
                dfd = None
                raise DbfError(
                    "%s does not support %s [%x]" %
                    (self._version,
                     version_map.get(header.version, 'Unknown: %s' % header.version),
                     header.version))
            if codepage is None:
                cp, sd, ld = _codepage_lookup(header.codepage())
                self._meta.decoder = codecs.getdecoder(sd)
                self._meta.encoder = codecs.getencoder(sd)
            # read and validate the field-descriptor block (terminated by CR)
            fieldblock = dfd.read(header.start - 32)
            for i in range(len(fieldblock) // 32 + 1):
                fieldend = i * 32
                if fieldblock[fieldend] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure in header")
            if len(fieldblock[:fieldend]) % 32 != 0:
                raise BadDataError("corrupt field structure in header")
            old_length = header.data[10:12]
            header.fields = fieldblock[:fieldend]
            header.data = header.data[:10] + old_length + header.data[12:]    # restore original for testing
            header.extra = fieldblock[fieldend + 1:]    # skip trailing \r
            self._initialize_fields()
            self._check_memo_integrity()
            dfd.seek(0)
        # apply Class/Empty overrides (per-field beats per-type defaults)
        for field in meta.fields:
            field_type = meta[field][TYPE]
            default_field_type = (
                fieldtypes[field_type]['Class'],
                fieldtypes[field_type]['Empty'],
                )
            specific_field_type = field_data_types.get(field)
            if specific_field_type is not None and not isinstance(specific_field_type, tuple):
                specific_field_type = (specific_field_type, )
            classes = []
            for result_name, result_type in ezip(
                    ('class', 'empty'),
                    specific_field_type or default_field_type,
                    ):
                classes.append(result_type)
            meta[field] = meta[field][:-2] + tuple(classes)
        # tables come back closed; user must open() before navigating
        meta.status = READ_ONLY
        self.close()
4814
4815 - def __iter__(self):
4816 """ 4817 iterates over the table's records 4818 """ 4819 return Iter(self)
4820
4821 - def __len__(self):
4822 """ 4823 returns number of records in table 4824 """ 4825 return self._meta.header.record_count
4826
    def __new__(cls, filename, field_specs=None, memo_size=128, ignore_memos=False,
                codepage=None, default_data_types=None, field_data_types=None,    # e.g. 'name':str, 'age':float
                dbf_type=None, on_disk=True,
                ):
        """
        selects the concrete table subclass to instantiate: explicit
        dbf_type wins, otherwise the type is guessed from the file itself
        """
        # passing an existing Table straight through is a no-op
        if dbf_type is None and isinstance(filename, Table):
            return filename
        if field_specs and dbf_type is None:
            dbf_type = default_type
        if dbf_type is not None:
            dbf_type = dbf_type.lower()
            table = table_types.get(dbf_type)
            if table is None:
                raise DbfError("Unknown table type: %s" % dbf_type)
            return object.__new__(table)
        else:
            base, ext = os.path.splitext(filename)
            if ext.lower() != '.dbf':
                filename = filename + '.dbf'
            possibles = guess_table_type(filename)
            if len(possibles) == 1:
                return object.__new__(possibles[0][2])
            else:
                # ambiguous signature: prefer the configured default type
                # NOTE: loop variables shadow the builtin `type` and the
                # `cls` parameter (harmless here -- cls is not used after)
                for type, desc, cls in possibles:
                    if type == default_type:
                        return object.__new__(cls)
                else:
                    types = ', '.join(["%s" % item[1] for item in possibles])
                    abbrs = '[' + ' | '.join(["%s" % item[0] for item in possibles]) + ']'
                    raise DbfError("Table could be any of %s.  Please specify %s when opening" % (types, abbrs))
4856
4857 - def __bool__(self):
4858 """ 4859 True if table has any records 4860 """ 4861 return self._meta.header.record_count != 0
4862
4863 - def __repr__(self):
4864 return __name__ + ".Table(%r, status=%r)" % (self._meta.filename, self._meta.status)
4865
4866 - def __str__(self):
4867 status = self._meta.status 4868 version = version_map.get(self._meta.header.version) 4869 if version is not None: 4870 version = self._version 4871 else: 4872 version = 'unknown - ' + hex(self._meta.header.version) 4873 str = """ 4874 Table: %s 4875 Type: %s 4876 Codepage: %s 4877 Status: %s 4878 Last updated: %s 4879 Record count: %d 4880 Field count: %d 4881 Record length: %d """ % (self.filename, version 4882 , self.codepage, status, 4883 self.last_update, len(self), self.field_count, self.record_length) 4884 str += "\n --Fields--\n" 4885 for i in range(len(self.field_names)): 4886 str += "%11d) %s\n" % (i, self._field_layout(i)) 4887 return str
    @property
    def codepage(self):
        """
        code page used for text translation
        """
        return CodePage(code_pages[self._meta.header.codepage()][0])

    @codepage.setter
    def codepage(self, codepage):
        # changing the codepage rewrites the header and swaps the codecs
        if not isinstance(codepage, CodePage):
            raise TypeError("codepage should be a CodePage, not a %r" % type(codepage))
        meta = self._meta
        if meta.status != READ_WRITE:
            raise DbfError('%s not in read/write mode, unable to change codepage' % meta.filename)
        meta.header.codepage(codepage.code)
        meta.decoder = codecs.getdecoder(codepage.name)
        meta.encoder = codecs.getencoder(codepage.name)
        self._update_disk(headeronly=True)

    @property
    def field_count(self):
        """
        the number of user fields in the table
        """
        return self._meta.user_field_count

    @property
    def field_names(self):
        """
        a list of the user fields in the table
        """
        # returns a copy so callers cannot mutate the metadata
        return self._meta.user_fields[:]

    @property
    def filename(self):
        """
        table's file name, including path (if specified on open)
        """
        return self._meta.filename

    @property
    def last_update(self):
        """
        date of last update
        """
        return self._meta.header.update

    @property
    def memoname(self):
        """
        table's memo name (if path included in filename on open)
        """
        return self._meta.memoname

    @property
    def record_length(self):
        """
        number of bytes in a record (including deleted flag and null field size)
        """
        return self._meta.header.record_length

    @property
    def supported_tables(self):
        """
        allowable table types
        """
        return self._supported_tables

    @property
    def status(self):
        """
        CLOSED, READ_ONLY, or READ_WRITE
        """
        return self._meta.status

    @property
    def version(self):
        """
        returns the dbf type of the table
        """
        return self._version
4970
4971 - def add_fields(self, field_specs):
4972 """ 4973 adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]] 4974 backup table is created with _backup appended to name 4975 then zaps table, recreates current structure, and copies records back from the backup 4976 """ 4977 meta = self._meta 4978 if meta.status != READ_WRITE: 4979 raise DbfError('%s not in read/write mode, unable to add fields (%s)' % (meta.filename, meta.status)) 4980 header = meta.header 4981 fields = self.structure() + self._list_fields(field_specs, sep=';') 4982 if (len(fields) + ('_nullflags' in meta)) > meta.max_fields: 4983 raise DbfError( 4984 "Adding %d more field%s would exceed the limit of %d" 4985 % (len(fields), ('','s')[len(fields)==1], meta.max_fields) 4986 ) 4987 old_table = None 4988 if self: 4989 old_table = self.create_backup() 4990 self.zap() 4991 if meta.mfd is not None and not meta.ignorememos: 4992 meta.mfd.close() 4993 meta.mfd = None 4994 meta.memo = None 4995 if not meta.ignorememos: 4996 meta.newmemofile = True 4997 offset = 1 4998 for name in meta.fields: 4999 del meta[name] 5000 meta.fields[:] = [] 5001 5002 meta.blankrecord = None 5003 for field in fields: 5004 field = field.lower() 5005 pieces = field.split() 5006 name = pieces.pop(0) 5007 if '(' in pieces[0]: 5008 loc = pieces[0].index('(') 5009 pieces.insert(0, pieces[0][:loc]) 5010 pieces[1] = pieces[1][loc:] 5011 format = FieldType(pieces.pop(0)) 5012 if pieces and '(' in pieces[0]: 5013 for i, p in enumerate(pieces): 5014 if ')' in p: 5015 pieces[0:i+1] = [''.join(pieces[0:i+1])] 5016 break 5017 if name[0] == '_' or name[0].isdigit() or not name.replace('_', '').isalnum(): 5018 raise FieldSpecError("%s invalid: field names must start with a letter, and can only contain letters, digits, and _" % name) 5019 # name = unicode(name) 5020 if name in meta.fields: 5021 raise DbfError("Field '%s' already exists" % name) 5022 field_type = format 5023 if len(name) > 10: 5024 raise FieldSpecError("Maximum field 
name length is 10. '%s' is %d characters long." % (name, len(name))) 5025 if not field_type in meta.fieldtypes.keys(): 5026 raise FieldSpecError("Unknown field type: %s" % field_type) 5027 init = self._meta.fieldtypes[field_type]['Init'] 5028 flags = self._meta.fieldtypes[field_type]['flags'] 5029 try: 5030 length, decimals, flags = init(pieces, flags) 5031 except FieldSpecError: 5032 exc = sys.exc_info()[1] 5033 raise FieldSpecError(exc.message + ' (%s:%s)' % (meta.filename, name)) from None 5034 start = offset 5035 end = offset + length 5036 offset = end 5037 meta.fields.append(name) 5038 cls = meta.fieldtypes[field_type]['Class'] 5039 empty = meta.fieldtypes[field_type]['Empty'] 5040 meta[name] = ( 5041 field_type, 5042 start, 5043 length, 5044 end, 5045 decimals, 5046 flags, 5047 cls, 5048 empty, 5049 ) 5050 self._build_header_fields() 5051 self._update_disk() 5052 if old_table is not None: 5053 old_table.open() 5054 for record in old_table: 5055 self.append(scatter(record)) 5056 old_table.close()
5057
    def allow_nulls(self, fields):
        """
        set fields to allow null values

        Rebuilds the table (via backup + zap) with the NULLABLE flag added
        to each named field; db3 tables cannot hold nullable fields.
        """
        meta = self._meta
        if meta.status != READ_WRITE:
            raise DbfError('%s not in read/write mode, unable to change field types' % meta.filename)
        elif self._versionabbr in ('db3', ):
            raise DbfError("Nullable fields are not allowed in %s tables" % self._version)
        header = meta.header
        fields = self._list_fields(fields)
        missing = set(fields) - set(self.field_names)
        if missing:
            raise FieldMissingError(', '.join(missing))
        # the hidden _nullflags field will be added by _build_header_fields
        if len(self.field_names) + 1 > meta.max_fields:
            raise DbfError(
                    "Adding the hidden _nullflags field would exceed the limit of %d fields for this table"
                    % (meta.max_fields, )
                    )
        # preserve existing records, rebuild, then restore
        old_table = None
        if self:
            old_table = self.create_backup()
            self.zap()
        if meta.mfd is not None and not meta.ignorememos:
            meta.mfd.close()
            meta.mfd = None
            meta.memo = None
        if not meta.ignorememos:
            meta.newmemofile = True
        for field in fields:
            specs = list(meta[field])
            specs[FLAGS] |= NULLABLE
            meta[field] = tuple(specs)
        meta.blankrecord = None
        self._build_header_fields()
        self._update_disk()
        if old_table is not None:
            old_table.open()
            for record in old_table:
                self.append(scatter(record))
            old_table.close()
5099
    def append(self, data=b'', drop=False, multiple=1):
        """
        adds <multiple> blank records, and fills fields with dict/tuple values if present

        data may be a Record/RecordTemplate (copied wholesale when layouts
        match), a dict, or a tuple; drop=True silently skips unknown fields.
        On failure the half-added record is rolled back before re-raising.
        """
        meta = self._meta
        if meta.status != READ_WRITE:
            raise DbfError('%s not in read/write mode, unable to append records' % meta.filename)
        if not self.field_count:
            raise DbfError("No fields defined, cannot append")
        empty_table = len(self) == 0
        dictdata = False
        tupledata = False
        header = meta.header
        kamikaze = b''
        if header.record_count == meta.max_records:
            raise DbfError("table %r is full; unable to add any more records" % self)
        if isinstance(data, (Record, RecordTemplate)):
            # identical layouts allow a raw byte copy (kamikaze path)
            if data._meta.record_sig[0] == self._meta.record_sig[0]:
                kamikaze = data._data
        else:
            if isinstance(data, dict):
                dictdata = data
                data = b''
            elif isinstance(data, tuple):
                if len(data) > self.field_count:
                    raise DbfError("incoming data has too many values")
                tupledata = data
                data = b''
            elif data:
                raise TypeError("data to append must be a tuple, dict, record, or template; not a %r" % type(data))
        newrecord = Record(recnum=header.record_count, layout=meta, kamikaze=kamikaze)
        # memo contents are not part of the record bytes; copy them explicitly
        if kamikaze and meta.memofields:
            newrecord._start_flux()
            for field in meta.memofields:
                newrecord[field] = data[field]
            newrecord._commit_flux()
        self._table.append(newrecord)
        header.record_count += 1
        if not kamikaze:
            try:
                if dictdata:
                    gather(newrecord, dictdata, drop=drop)
                elif tupledata:
                    newrecord._start_flux()
                    for index, item in enumerate(tupledata):
                        newrecord[index] = item
                    newrecord._commit_flux()
                elif data:
                    # record/template with a different layout: copy field by field
                    newrecord._start_flux()
                    data_fields = field_names(data)
                    my_fields = self.field_names
                    for field in data_fields:
                        if field not in my_fields:
                            if not drop:
                                raise DbfError("field %r not in table %r" % (field, self))
                        else:
                            newrecord[field] = data[field]
                    newrecord._commit_flux()
            except Exception:
                # roll back the failed record before propagating
                self._table.pop()    # discard failed record
                header.record_count = header.record_count - 1
                self._update_disk()
                raise
        multiple -= 1
        if multiple:
            # clone the first record's bytes for the remaining copies
            data = newrecord._data
            single = header.record_count
            total = single + multiple
            while single < total:
                multi_record = Record(single, meta, kamikaze=data)
                multi_record._start_flux()
                self._table.append(multi_record)
                for field in meta.memofields:
                    multi_record[field] = newrecord[field]
                single += 1
                multi_record._commit_flux()
            header.record_count = total    # += multiple
            newrecord = multi_record
        self._update_disk(headeronly=True)
5180
5181 - def close(self):
5182 """ 5183 closes disk files, flushing record data to disk 5184 ensures table data is available if keep_table 5185 ensures memo data is available if keep_memos 5186 """ 5187 if self._meta.location == ON_DISK and self._meta.status != CLOSED: 5188 self._table.flush() 5189 if self._meta.mfd is not None: 5190 self._meta.mfd.close() 5191 self._meta.mfd = None 5192 self._meta.dfd.close() 5193 self._meta.dfd = None 5194 self._meta.status = CLOSED
5195
    def create_backup(self, new_name=None, on_disk=None):
        """
        creates a backup table

        Copies structure and every record into a new table (defaulting to
        <name>_backup next to the original, honoring its case); records the
        backup name on self.backup and restores the open/closed state.
        """
        meta = self._meta
        already_open = meta.status != CLOSED
        if not already_open:
            self.open()
        if on_disk is None:
            on_disk = meta.location
        if not on_disk and new_name is None:
            new_name = self.filename + '_backup'
        if new_name is None:
            upper = self.filename.isupper()
            directory, filename = os.path.split(self.filename)
            name, ext = os.path.splitext(filename)
            extra = ('_backup', '_BACKUP')[upper]
            new_name = os.path.join(temp_dir or directory, name + extra + ext)
        bkup = Table(new_name, self.structure(), codepage=self.codepage.name, dbf_type=self._versionabbr, on_disk=on_disk)
        bkup.open()
        for record in self:
            bkup.append(record)
        bkup.close()
        self.backup = new_name
        if not already_open:
            self.close()
        return bkup
5223
5224 - def create_index(self, key):
5225 """ 5226 creates an in-memory index using the function key 5227 """ 5228 meta = self._meta 5229 if meta.status == CLOSED: 5230 raise DbfError('%s is closed' % meta.filename) 5231 return Index(self, key)
5232
5233 - def create_template(self, record=None, defaults=None):
5234 """ 5235 returns a record template that can be used like a record 5236 """ 5237 return RecordTemplate(self._meta, original_record=record, defaults=defaults)
5238
    def delete_fields(self, doomed):
        """
        removes field(s) from the table
        creates backup files with _backup appended to the file name,
        then modifies current structure
        """
        meta = self._meta
        if meta.status != READ_WRITE:
            raise DbfError('%s not in read/write mode, unable to delete fields' % meta.filename)
        doomed = self._list_fields(doomed)
        header = meta.header
        for victim in doomed:
            if victim not in meta.user_fields:
                raise DbfError("field %s not in table -- delete aborted" % victim)
        # preserve records, rebuild structure, restore surviving data
        old_table = None
        if self:
            old_table = self.create_backup()
            self.zap()
        if meta.mfd is not None and not meta.ignorememos:
            meta.mfd.close()
            meta.mfd = None
            meta.memo = None
        if not meta.ignorememos:
            meta.newmemofile = True
        # _nullflags is always rebuilt by _build_header_fields, so drop it too
        if '_nullflags' in meta.fields:
            doomed.append('_nullflags')
        for victim in doomed:
            layout = meta[victim]
            meta.fields.pop(meta.fields.index(victim))
            start = layout[START]
            end = layout[END]
            # slide every following field left to fill the removed span
            for field in meta.fields:
                if meta[field][START] == end:
                    specs = list(meta[field])
                    end = specs[END]                    #self._meta[field][END]
                    specs[START] = start                #self._meta[field][START] = start
                    specs[END] = start + specs[LENGTH]  #self._meta[field][END] = start + self._meta[field][LENGTH]
                    start = specs[END]                  #self._meta[field][END]
                    meta[field] = tuple(specs)
        self._build_header_fields()
        self._update_disk()
        # drop layout entries for fields no longer present
        for name in list(meta):
            if name not in meta.fields:
                del meta[name]
        if old_table is not None:
            old_table.open()
            for record in old_table:
                self.append(scatter(record), drop=True)
            old_table.close()
5288
5289 - def disallow_nulls(self, fields):
5290 """ 5291 set fields to not allow null values 5292 """ 5293 meta = self._meta 5294 if meta.status != READ_WRITE: 5295 raise DbfError('%s not in read/write mode, unable to change field types' % meta.filename) 5296 fields = self._list_fields(fields) 5297 missing = set(fields) - set(self.field_names) 5298 if missing: 5299 raise FieldMissingError(', '.join(missing)) 5300 old_table = None 5301 if self: 5302 old_table = self.create_backup() 5303 self.zap() 5304 if meta.mfd is not None and not meta.ignorememos: 5305 meta.mfd.close() 5306 meta.mfd = None 5307 meta.memo = None 5308 if not meta.ignorememos: 5309 meta.newmemofile = True 5310 for field in fields: 5311 specs = list(meta[field]) 5312 specs[FLAGS] &= 0xff ^ NULLABLE 5313 meta[field] = tuple(specs) 5314 meta.blankrecord = None 5315 self._build_header_fields() 5316 self._update_disk() 5317 if old_table is not None: 5318 old_table.open() 5319 for record in old_table: 5320 self.append(scatter(record)) 5321 old_table.close()
5322
5323 - def field_info(self, field):
5324 """ 5325 returns (field type, size, dec, class) of field 5326 """ 5327 if field in self.field_names: 5328 field = self._meta[field] 5329 return FieldInfo(field[TYPE], field[LENGTH], field[DECIMALS], field[CLASS]) 5330 raise FieldMissingError("%s is not a field in %s" % (field, self.filename))
5331
5332 - def index(self, record, start=None, stop=None):
5333 """ 5334 returns the index of record between start and stop 5335 start and stop default to the first and last record 5336 """ 5337 if not isinstance(record, (Record, RecordTemplate, dict, tuple)): 5338 raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record)) 5339 meta = self._meta 5340 if meta.status == CLOSED: 5341 raise DbfError('%s is closed' % meta.filename) 5342 if start is None: 5343 start = 0 5344 if stop is None: 5345 stop = len(self) 5346 for i in range(start, stop): 5347 if record == (self[i]): 5348 return i 5349 else: 5350 raise NotFoundError("dbf.Table.index(x): x not in table", data=record)
5351
5352 - def new(self, filename, field_specs=None, memo_size=None, ignore_memos=None, codepage=None, default_data_types=None, field_data_types=None, on_disk=True):
5353 """ 5354 returns a new table of the same type 5355 """ 5356 if field_specs is None: 5357 field_specs = self.structure() 5358 if on_disk: 5359 path, name = os.path.split(filename) 5360 if path == "": 5361 filename = os.path.join(os.path.split(self.filename)[0], filename) 5362 elif name == "": 5363 filename = os.path.join(path, os.path.split(self.filename)[1]) 5364 if memo_size is None: 5365 memo_size = self._meta.memo_size 5366 if ignore_memos is None: 5367 ignore_memos = self._meta.ignorememos 5368 if codepage is None: 5369 codepage = self._meta.header.codepage()#[0] 5370 if default_data_types is None: 5371 default_data_types = self._meta._default_data_types 5372 if field_data_types is None: 5373 field_data_types = self._meta._field_data_types 5374 return Table(filename, field_specs, memo_size, ignore_memos, codepage, default_data_types, field_data_types, dbf_type=self._versionabbr, on_disk=on_disk)
5375
5376 - def nullable_field(self, field):
5377 """ 5378 returns True if field allows Nulls 5379 """ 5380 if field not in self.field_names: 5381 raise MissingField(field) 5382 return bool(self._meta[field][FLAGS] & NULLABLE)
5383
5384 - def open(self, mode=READ_WRITE):
5385 """ 5386 (re)opens disk table, (re)initializes data structures 5387 """ 5388 if mode not in (READ_WRITE, READ_ONLY): 5389 raise DbfError("mode for open must be 'read-write' or 'read-only', not %r" % mode) 5390 meta = self._meta 5391 if meta.status == mode: 5392 return self # no-op 5393 meta.status = mode 5394 if meta.location == IN_MEMORY: 5395 return self 5396 if '_table' in dir(self): 5397 del self._table 5398 dfd = meta.dfd = open(meta.filename, 'r+b') 5399 dfd.seek(0) 5400 header = meta.header = self._TableHeader(dfd.read(32), self._pack_date, self._unpack_date) 5401 if not header.version in self._supported_tables: 5402 dfd.close() 5403 dfd = None 5404 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(header.version, 'Unknown: %s' % header.version), header.version)) 5405 fieldblock = dfd.read(header.start - 32) 5406 for i in range(len(fieldblock) // 32 + 1): 5407 fieldend = i * 32 5408 if fieldblock[fieldend] == CR: 5409 break 5410 else: 5411 raise BadDataError("corrupt field structure in header") 5412 if len(fieldblock[:fieldend]) % 32 != 0: 5413 raise BadDataError("corrupt field structure in header") 5414 header.fields = fieldblock[:fieldend] 5415 header.extra = fieldblock[fieldend + 1:] # skip trailing \r 5416 self._meta.ignorememos = self._meta.original_ignorememos 5417 self._initialize_fields() 5418 self._check_memo_integrity() 5419 self._index = -1 5420 dfd.seek(0) 5421 return self
5422
5423 - def pack(self):
5424 """ 5425 physically removes all deleted records 5426 """ 5427 meta = self._meta 5428 if meta.status != READ_WRITE: 5429 raise DbfError('%s not in read/write mode, unable to pack records' % meta.filename) 5430 for dbfindex in self._indexen: 5431 dbfindex._clear() 5432 newtable = [] 5433 index = 0 5434 for record in self._table: 5435 if is_deleted(record): 5436 record._recnum = -1 5437 else: 5438 record._recnum = index 5439 newtable.append(record) 5440 index += 1 5441 if meta.location == ON_DISK: 5442 self._table.clear() 5443 else: 5444 self._table[:] = [] 5445 for record in newtable: 5446 self._table.append(record) 5447 self._pack_count += 1 5448 self._meta.header.record_count = index 5449 self._index = -1 5450 self._update_disk() 5451 self.reindex()
5452
5453 - def query(self, criteria):
5454 """ 5455 criteria is a string that will be converted into a function that returns 5456 a List of all matching records 5457 """ 5458 meta = self._meta 5459 if meta.status == CLOSED: 5460 raise DbfError('%s is closed' % meta.filename) 5461 return pql(self, criteria)
5462
5463 - def reindex(self):
5464 """ 5465 reprocess all indices for this table 5466 """ 5467 meta = self._meta 5468 if meta.status == CLOSED: 5469 raise DbfError('%s is closed' % meta.filename) 5470 for dbfindex in self._indexen: 5471 dbfindex._reindex()
5472
5473 - def rename_field(self, oldname, newname):
5474 """ 5475 renames an existing field 5476 """ 5477 meta = self._meta 5478 if meta.status != READ_WRITE: 5479 raise DbfError('%s not in read/write mode, unable to change field names' % meta.filename) 5480 if self: 5481 self.create_backup() 5482 if not oldname in self._meta.user_fields: 5483 raise FieldMissingError("field --%s-- does not exist -- cannot rename it." % oldname) 5484 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_', '').isalnum(): 5485 raise FieldSpecError("field names cannot start with _ or digits, and can only contain the _, letters, and digits") 5486 newname = newname.lower() 5487 if newname in self._meta.fields: 5488 raise DbfError("field --%s-- already exists" % newname) 5489 if len(newname) > 10: 5490 raise FieldSpecError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname))) 5491 self._meta[newname] = self._meta[oldname] 5492 self._meta.fields[self._meta.fields.index(oldname)] = newname 5493 self._build_header_fields() 5494 self._update_disk(headeronly=True)
5495
5496 - def resize_field(self, chosen, new_size):
5497 """ 5498 resizes field (C only at this time) 5499 creates backup file, then modifies current structure 5500 """ 5501 meta = self._meta 5502 if meta.status != READ_WRITE: 5503 raise DbfError('%s not in read/write mode, unable to change field size' % meta.filename) 5504 if not 0 < new_size < 256: 5505 raise DbfError("new_size must be between 1 and 255 (use delete_fields to remove a field)") 5506 chosen = self._list_fields(chosen) 5507 for candidate in chosen: 5508 if candidate not in self._meta.user_fields: 5509 raise DbfError("field %s not in table -- resize aborted" % candidate) 5510 elif self.field_info(candidate).field_type != FieldType.CHAR: 5511 raise DbfError("field %s is not Character -- resize aborted" % candidate) 5512 if self: 5513 old_table = self.create_backup() 5514 self.zap() 5515 if meta.mfd is not None and not meta.ignorememos: 5516 meta.mfd.close() 5517 meta.mfd = None 5518 meta.memo = None 5519 if not meta.ignorememos: 5520 meta.newmemofile = True 5521 struct = self.structure() 5522 meta.user_fields[:] = [] 5523 new_struct = [] 5524 for field_spec in struct: 5525 name, spec = field_spec.split(' ', 1) 5526 if name in chosen: 5527 spec = "C(%d)" % new_size 5528 new_struct.append(' '.join([name, spec])) 5529 self.add_fields(';'.join(new_struct)) 5530 if old_table is not None: 5531 old_table.open() 5532 for record in old_table: 5533 self.append(scatter(record), drop=True) 5534 old_table.close()
5535
5536 - def structure(self, fields=None):
5537 """ 5538 return field specification list suitable for creating same table layout 5539 fields should be a list of fields or None for all fields in table 5540 """ 5541 field_specs = [] 5542 fields = self._list_fields(fields) 5543 try: 5544 for name in fields: 5545 field_specs.append(self._field_layout(self.field_names.index(name))) 5546 except ValueError: 5547 raise DbfError("field %s does not exist" % name) from None 5548 return field_specs
5549
5550 - def zap(self):
5551 """ 5552 removes all records from table -- this cannot be undone! 5553 """ 5554 meta = self._meta 5555 if meta.status != READ_WRITE: 5556 raise DbfError('%s not in read/write mode, unable to zap table' % meta.filename) 5557 if meta.location == IN_MEMORY: 5558 self._table[:] = [] 5559 else: 5560 self._table.clear() 5561 if meta.memo: 5562 meta.memo._zap() 5563 meta.header.record_count = 0 5564 self._index = -1 5565 self._update_disk()
5566
class Db3Table(Table):
    """
    Provides an interface for working with dBase III tables.
    """

    _version = 'dBase III Plus'
    _versionabbr = 'db3'

    @MutableDefault
    def _field_types():
        # Maps field-type code -> implementation details for that type.
        # BUG FIX: the NUMERIC entry appeared twice with identical
        # definitions; the duplicate key has been removed (the second
        # silently overwrote the first).
        return {
            CHAR: {
                    'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_character,
                    'Class':str, 'Empty':str, 'flags':tuple(),
                    },
            DATE: {
                    # dates are always eight bytes on disk
                    'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b'        ', 'Init':add_date,
                    'Class':datetime.date, 'Empty':none, 'flags':tuple(),
                    },
            NUMERIC: {
                    'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
                    'Class':'default', 'Empty':none, 'flags':tuple(),
                    },
            LOGICAL: {
                    'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
                    'Class':bool, 'Empty':none, 'flags':tuple(),
                    },
            MEMO: {
                    # memo index field is ten bytes on disk
                    'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b'          ', 'Init':add_memo,
                    'Class':str, 'Empty':str, 'flags':tuple(),
                    },
            }

    _memoext = '.dbt'
    _memoClass = _Db3Memo
    _yesMemoMask = 0x80
    _noMemoMask = 0x7f
    _binary_types = ()
    _character_types = (CHAR, MEMO)
    _currency_types = tuple()
    _date_types = (DATE, )
    _datetime_types = tuple()
    _decimal_types = (NUMERIC, FLOAT)
    _fixed_types = (DATE, LOGICAL, MEMO)
    _logical_types = (LOGICAL, )
    _memo_types = (MEMO, )
    _numeric_types = (NUMERIC, FLOAT)
    _variable_types = (CHAR, NUMERIC)
    _dbfTableHeader = array('B', [0] * 32)
    _dbfTableHeader[0] = 3          # version - dBase III w/o memo's
    _dbfTableHeader[8:10] = array('B', pack_short_int(33))
    _dbfTableHeader[10] = 1         # record length -- one for delete flag
    _dbfTableHeader[29] = 3         # code page -- 437 US-MS DOS
    _dbfTableHeader = _dbfTableHeader.tobytes()
    _dbfTableHeaderExtra = b''
    _supported_tables = (0x03, 0x83)

    def _check_memo_integrity(self):
        """
        dBase III and Clipper

        Verify that the header's memo flag, the table's memo fields, and
        the memo file on disk all agree; attach the memo file if needed.
        """
        if not self._meta.ignorememos:
            memo_fields = False
            for field in self._meta.fields:
                if self._meta[field][TYPE] in self._memo_types:
                    memo_fields = True
                    break
            if memo_fields and self._meta.header.version != 0x83:
                self._meta.dfd.close()
                self._meta.dfd = None
                raise BadDataError("Table structure corrupt: memo fields exist, header declares no memos")
            elif memo_fields and not os.path.exists(self._meta.memoname):
                self._meta.dfd.close()
                self._meta.dfd = None
                raise BadDataError("Table structure corrupt: memo fields exist without memo file")
            if memo_fields:
                try:
                    self._meta.memo = self._memoClass(self._meta)
                except Exception:
                    exc = sys.exc_info()[1]
                    self._meta.dfd.close()
                    self._meta.dfd = None
                    raise BadDataError("Table structure corrupt: unable to use memo file (%s)" % exc.args[-1]) from None

    def _initialize_fields(self):
        """
        builds the FieldList of names, types, and descriptions

        Parses the on-disk field definition block into meta's field layout
        tuples, preserving class/empty overrides from a previous open.
        """
        old_fields = defaultdict(dict)
        meta = self._meta
        for name in meta.fields:
            old_fields[name]['type'] = meta[name][TYPE]
            old_fields[name]['empty'] = meta[name][EMPTY]
            old_fields[name]['class'] = meta[name][CLASS]
        meta.fields[:] = []
        offset = 1    # record byte 0 is the delete flag
        fieldsdef = meta.header.fields
        if len(fieldsdef) % 32 != 0:
            raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != meta.header.field_count:
            raise BadDataError("Header shows %d fields, but field definition block has %d fields" % (meta.header.field_count, len(fieldsdef) // 32))
        total_length = meta.header.record_length
        for i in range(meta.header.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
            type = fieldblock[11]
            if not type in meta.fieldtypes:
                raise BadDataError("Unknown field type: %s" % type)
            start = offset
            length = fieldblock[16]
            offset += length
            end = start + length
            decimals = fieldblock[17]
            flags = fieldblock[18]
            if name in meta.fields:
                raise BadDataError('Duplicate field name found: %s' % name)
            meta.fields.append(name)
            if name in old_fields and old_fields[name]['type'] == type:
                cls = old_fields[name]['class']
                empty = old_fields[name]['empty']
            else:
                cls = meta.fieldtypes[type]['Class']
                empty = meta.fieldtypes[type]['Empty']
            meta[name] = (
                    type,
                    start,
                    length,
                    end,
                    decimals,
                    flags,
                    cls,
                    empty,
                    )
        if offset != total_length:
            raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)
5709
class ClpTable(Db3Table):
    """
    Provides an interface for working with Clipper tables.
    """

    _version = 'Clipper 5'
    _versionabbr = 'clp'

    @MutableDefault
    def _field_types():
        # Maps field-type code -> implementation details for that type.
        # BUG FIX: the NUMERIC entry appeared twice with identical
        # definitions; the duplicate key has been removed (the second
        # silently overwrote the first).
        return {
            CHAR: {
                    'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_clp_character,
                    'Class':str, 'Empty':str, 'flags':tuple(),
                    },
            DATE: {
                    # dates are always eight bytes on disk
                    'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b'        ', 'Init':add_date,
                    'Class':datetime.date, 'Empty':none, 'flags':tuple(),
                    },
            NUMERIC: {
                    'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_numeric,
                    'Class':'default', 'Empty':none, 'flags':tuple(),
                    },
            LOGICAL: {
                    'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
                    'Class':bool, 'Empty':none, 'flags':tuple(),
                    },
            MEMO: {
                    # memo index field is ten bytes on disk
                    'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b'          ', 'Init':add_memo,
                    'Class':str, 'Empty':str, 'flags':tuple(),
                    },
            }

    _memoext = '.dbt'
    _memoClass = _Db3Memo
    _yesMemoMask = 0x80
    _noMemoMask = 0x7f
    _binary_types = ()
    _character_types = (CHAR, MEMO)
    _currency_types = tuple()
    _date_types = (DATE, )
    _datetime_types = tuple()
    _decimal_types = (NUMERIC, FLOAT)
    _fixed_types = (DATE, LOGICAL, MEMO)
    _logical_types = (LOGICAL, )
    _memo_types = (MEMO, )
    _numeric_types = (NUMERIC, FLOAT)
    _variable_types = (CHAR, NUMERIC)
    _dbfTableHeader = array('B', [0] * 32)
    _dbfTableHeader[0] = 3          # version - dBase III w/o memo's
    _dbfTableHeader[8:10] = array('B', pack_short_int(33))
    _dbfTableHeader[10] = 1         # record length -- one for delete flag
    _dbfTableHeader[29] = 3         # code page -- 437 US-MS DOS
    _dbfTableHeader = _dbfTableHeader.tobytes()
    _dbfTableHeaderExtra = b''
    _supported_tables = (0x03, 0x83)

    class _TableHeader(Table._TableHeader):
        """
        represents the data block that defines a tables type and layout

        Clipper variant: Character fields longer than 255 store the extra
        length in the decimals byte (byte 17), so record-length math here
        accounts for that.
        """

        @property
        def fields(self):
            "field block structure"
            fieldblock = self._data[32:]
            # field definitions end at the first CR byte
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            return fieldblock[:cr].tobytes()

        @fields.setter
        def fields(self, block):
            fieldblock = self._data[32:]
            for i in range(len(fieldblock)//32+1):
                cr = i * 32
                if fieldblock[cr] == CR:
                    break
            else:
                raise BadDataError("corrupt field structure")
            cr += 32    # convert to indexing main structure
            fieldlen = len(block)
            if fieldlen % 32 != 0:
                raise BadDataError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
            self._data[32:cr] = array('B', block)                           # fields
            self._data[8:10] = array('B', pack_short_int(len(self._data)))  # start
            fieldlen = fieldlen // 32
            recordlen = 1    # deleted flag
            for i in range(fieldlen):
                recordlen += block[i*32+16]
                if block[i*32+11] == CHAR:
                    # long Character fields keep the high byte of the
                    # length in the decimals slot
                    recordlen += block[i*32+17] * 256
            self._data[10:12] = array('B', pack_short_int(recordlen))

    def _build_header_fields(self):
        """
        constructs fieldblock for disk table

        Rebuilds the on-disk field definition block (and the hidden
        _nullflags field when any field is nullable), updating memo state
        and record length to match.
        """
        fieldblock = array('B', b'')
        memo = False
        nulls = False
        meta = self._meta
        header = meta.header
        header.version = header.version & self._noMemoMask
        meta.fields = [f for f in meta.fields if f != '_nullflags']
        total_length = 1    # delete flag
        for field in meta.fields:
            layout = meta[field]
            if meta.fields.count(field) > 1:
                raise BadDataError("corrupted field structure (noticed in _build_header_fields)")
            fielddef = array('B', [0] * 32)
            fielddef[:11] = array('B', pack_str(meta.encoder(field)[0]))
            fielddef[11] = layout[TYPE]
            fielddef[12:16] = array('B', pack_long_int(layout[START]))
            total_length += layout[LENGTH]
            if layout[TYPE] == CHAR:     # long character field
                fielddef[16] = layout[LENGTH] % 256
                fielddef[17] = layout[LENGTH] // 256
            else:
                fielddef[16] = layout[LENGTH]
                fielddef[17] = layout[DECIMALS]
            fielddef[18] = layout[FLAGS]
            fieldblock.extend(fielddef)
            if layout[TYPE] in meta.memo_types:
                memo = True
            if layout[FLAGS] & NULLABLE:
                nulls = True
        if memo:
            header.version = header.version | self._yesMemoMask
            if meta.memo is None:
                meta.memo = self._memoClass(meta)
        else:
            if os.path.exists(meta.memoname):
                if meta.mfd is not None:
                    meta.mfd.close()
                os.remove(meta.memoname)
            meta.memo = None
        if nulls:
            # append the hidden _nullflags bitmap field after the last field
            start = layout[START] + layout[LENGTH]
            length, one_more = divmod(len(meta.fields), 8)
            if one_more:
                length += 1
            fielddef = array('B', [0] * 32)
            fielddef[:11] = array('B', pack_str(b'_nullflags'))
            fielddef[11] = FieldType._NULLFLAG
            fielddef[12:16] = array('B', pack_long_int(start))
            fielddef[16] = length
            fielddef[17] = 0
            fielddef[18] = BINARY | SYSTEM
            fieldblock.extend(fielddef)
            meta.fields.append('_nullflags')
            nullflags = (
                    _NULLFLAG,          # type
                    start,              # start
                    length,             # length
                    start + length,     # end
                    0,                  # decimals
                    BINARY | SYSTEM,    # flags
                    none,               # class
                    none,               # empty
                    )
            meta['_nullflags'] = nullflags
        header.fields = fieldblock.tobytes()
        header.record_length = total_length
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)

    def _initialize_fields(self):
        """
        builds the FieldList of names, types, and descriptions

        Clipper variant: Character fields use the decimals byte as the
        high byte of the field length.
        """
        meta = self._meta
        old_fields = defaultdict(dict)
        for name in meta.fields:
            old_fields[name]['type'] = meta[name][TYPE]
            old_fields[name]['empty'] = meta[name][EMPTY]
            old_fields[name]['class'] = meta[name][CLASS]
        meta.fields[:] = []
        offset = 1    # record byte 0 is the delete flag
        fieldsdef = meta.header.fields
        if len(fieldsdef) % 32 != 0:
            raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != meta.header.field_count:
            # BUG FIX: the format string was not applied to its arguments
            # (missing % operator), which made this raise TypeError instead
            # of the intended BadDataError message
            raise BadDataError("Header shows %d fields, but field definition block has %d fields" % (meta.header.field_count, len(fieldsdef) // 32))
        total_length = meta.header.record_length
        for i in range(meta.header.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
            type = fieldblock[11]
            if not type in meta.fieldtypes:
                raise BadDataError("Unknown field type: %s" % type)
            start = offset
            length = fieldblock[16]
            decimals = fieldblock[17]
            if type == CHAR:
                # long Character fields: decimals byte holds the high byte
                length += decimals * 256
            offset += length
            end = start + length
            flags = fieldblock[18]
            if name in meta.fields:
                raise BadDataError('Duplicate field name found: %s' % name)
            meta.fields.append(name)
            if name in old_fields and old_fields[name]['type'] == type:
                cls = old_fields[name]['class']
                empty = old_fields[name]['empty']
            else:
                cls = meta.fieldtypes[type]['Class']
                empty = meta.fieldtypes[type]['Empty']
            meta[name] = (
                    type,
                    start,
                    length,
                    end,
                    decimals,
                    flags,
                    cls,
                    empty,
                    )
        if offset != total_length:
            # BUG FIX: missing % operator, as above
            raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)
5945
class FpTable(Table):
    """
    Provides an interface for working with FoxPro 2 tables
    """

    _version = 'Foxpro'
    _versionabbr = 'fp'

    @MutableDefault
    def _field_types():
        # maps field-type code -> implementation details for that type
        return {
            CHAR: {
                    'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character,
                    'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
                    },
            FLOAT: {
                    'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
                    'Class':'default', 'Empty':none, 'flags':('null', ),
                    },
            NUMERIC: {
                    'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
                    'Class':'default', 'Empty':none, 'flags':('null', ),
                    },
            LOGICAL: {
                    'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':lambda x: b'?', 'Init':add_logical,
                    'Class':bool, 'Empty':none, 'flags':('null', ),
                    },
            DATE: {
                    # dates are always eight bytes on disk
                    'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':lambda x: b'        ', 'Init':add_date,
                    'Class':datetime.date, 'Empty':none, 'flags':('null', ),
                    },
            MEMO: {
                    # memo index field is ten bytes on disk
                    'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b'          ', 'Init':add_memo,
                    'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
                    },
            GENERAL: {
                    'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b'          ', 'Init':add_binary_memo,
                    'Class':bytes, 'Empty':bytes, 'flags':('null', ),
                    },
            PICTURE: {
                    'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b'          ', 'Init':add_binary_memo,
                    'Class':bytes, 'Empty':bytes, 'flags':('null', ),
                    },
            _NULLFLAG: {
                    'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':lambda x: b'\x00' * x, 'Init':None,
                    'Class':none, 'Empty':none, 'flags':('binary', 'system', ),
                    } }

    _memoext = '.fpt'
    _memoClass = _VfpMemo
    _yesMemoMask = 0xf5         # 1111 0101
    _noMemoMask = 0x03          # 0000 0011
    _binary_types = (GENERAL, MEMO, PICTURE)
    # _character_types = ('C', 'D', 'F', 'L', 'M', 'N')       # field representing character data
    _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC)      # field representing character data
    _currency_types = tuple()
    _date_types = (DATE, )
    _datetime_types = tuple()
    # _fixed_types = ('D', 'G', 'L', 'M', 'P')
    _fixed_types = (DATE, GENERAL, LOGICAL, MEMO, PICTURE)
    _logical_types = (LOGICAL, )
    _memo_types = (GENERAL, MEMO, PICTURE)
    _numeric_types = (FLOAT, NUMERIC)
    _text_types = (CHAR, MEMO)
    _variable_types = (CHAR, FLOAT, NUMERIC)
    _supported_tables = (0x03, 0xf5)
    _dbfTableHeader = array('B', [0] * 32)
    _dbfTableHeader[0] = 0x30          # version - Foxpro 6  0011 0000
    _dbfTableHeader[8:10] = array('B', pack_short_int(33 + 263))
    _dbfTableHeader[10] = 1            # record length -- one for delete flag
    _dbfTableHeader[29] = 3            # code page -- 437 US-MS DOS
    _dbfTableHeader = _dbfTableHeader.tobytes()
    _dbfTableHeaderExtra = b'\x00' * 263

    def _check_memo_integrity(self):
        """
        Verify that the table's memo fields and the memo file on disk
        agree (both present or both absent); attach the memo file if used.
        """
        if not self._meta.ignorememos:
            memo_fields = False
            for field in self._meta.fields:
                if self._meta[field][TYPE] in self._memo_types:
                    memo_fields = True
                    break
            if memo_fields and not os.path.exists(self._meta.memoname):
                self._meta.dfd.close()
                self._meta.dfd = None
                raise BadDataError("Table structure corrupt: memo fields exist without memo file")
            elif not memo_fields and os.path.exists(self._meta.memoname):
                self._meta.dfd.close()
                self._meta.dfd = None
                raise BadDataError("Table structure corrupt: no memo fields exist but memo file does")
            if memo_fields:
                try:
                    self._meta.memo = self._memoClass(self._meta)
                except Exception:
                    exc = sys.exc_info()[1]
                    self._meta.dfd.close()
                    self._meta.dfd = None
                    raise BadDataError("Table structure corrupt: unable to use memo file (%s)" % exc.args[-1]) from None

    def _initialize_fields(self):
        """
        builds the FieldList of names, types, and descriptions

        Parses the on-disk field definition block into meta's field layout
        tuples, preserving class/empty overrides from a previous open.
        """
        meta = self._meta
        old_fields = defaultdict(dict)
        for name in meta.fields:
            old_fields[name]['type'] = meta[name][TYPE]
            old_fields[name]['class'] = meta[name][CLASS]
            old_fields[name]['empty'] = meta[name][EMPTY]
        meta.fields[:] = []
        offset = 1    # record byte 0 is the delete flag
        fieldsdef = meta.header.fields
        if len(fieldsdef) % 32 != 0:
            raise BadDataError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
        if len(fieldsdef) // 32 != meta.header.field_count:
            # BUG FIX: the format string was not applied to its arguments
            # (missing % operator), which made this raise TypeError instead
            # of the intended BadDataError message
            raise BadDataError("Header shows %d fields, but field definition block has %d fields" % (meta.header.field_count, len(fieldsdef) // 32))
        total_length = meta.header.record_length
        for i in range(meta.header.field_count):
            fieldblock = fieldsdef[i*32:(i+1)*32]
            name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
            type = fieldblock[11]
            if not type in meta.fieldtypes:
                raise BadDataError("Unknown field type: %s" % type)
            start = offset
            length = fieldblock[16]
            offset += length
            end = start + length
            decimals = fieldblock[17]
            flags = fieldblock[18]
            if name in meta.fields:
                raise BadDataError('Duplicate field name found: %s' % name)
            meta.fields.append(name)
            if name in old_fields and old_fields[name]['type'] == type:
                cls = old_fields[name]['class']
                empty = old_fields[name]['empty']
            else:
                cls = meta.fieldtypes[type]['Class']
                empty = meta.fieldtypes[type]['Empty']
            meta[name] = (
                    type,
                    start,
                    length,
                    end,
                    decimals,
                    flags,
                    cls,
                    empty,
                    )
        if offset != total_length:
            raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
        meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
        meta.user_field_count = len(meta.user_fields)
        Record._create_blank_data(meta)

    @staticmethod
    def _pack_date(date):
        """
        Returns a group of three bytes, in integer form, of the date
        """
        # return "%c%c%c" % (date.year - 2000, date.month, date.day)
        return bytes([date.year - 2000, date.month, date.day])

    @staticmethod
    def _unpack_date(bytestr):
        """
        Returns a Date() of the packed three-byte date passed in
        """
        year, month, day = struct.unpack('<BBB', bytestr)
        year += 2000
        return Date(year, month, day)
6117
6118 -class VfpTable(FpTable):
6119 """ 6120 Provides an interface for working with Visual FoxPro 6 tables 6121 """ 6122 6123 _version = 'Visual Foxpro' 6124 _versionabbr = 'vfp' 6125 6126 @MutableDefault
def _field_types():
    # NOTE: decorated with @MutableDefault in the class body (the decorator is
    # above this chunk), so each access produces a fresh dict and per-table
    # customizations cannot leak between Table instances.
    """
    Map each VFP field-type code to its handlers and metadata:

    - 'Retrieve'/'Update': convert between on-disk bytes and Python values
    - 'Blank': build the empty on-disk value (called with the field length)
    - 'Init': add/validate the field definition
    - 'Class'/'Empty': Python type used for stored values / empty fields
      ('default' means: use the table's configured default class)
    - 'flags': field flags this type accepts
    """
    return {
            CHAR: {
                    'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character,
                    'Blank':lambda x: b' ' * x, 'Init':add_vfp_character,
                    'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
                    },
            CURRENCY: {
                    'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency,
                    'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_currency,
                    'Class':Decimal, 'Empty':none, 'flags':('null', ),
                    },
            DOUBLE: {
                    'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double,
                    'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_double,
                    'Class':float, 'Empty':none, 'flags':('null', ),
                    },
            FLOAT: {
                    'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric,
                    'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
                    'Class':'default', 'Empty':none, 'flags':('null', ),
                    },
            NUMERIC: {
                    'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric,
                    'Blank':lambda x: b' ' * x, 'Init':add_vfp_numeric,
                    'Class':'default', 'Empty':none, 'flags':('null', ),
                    },
            INTEGER: {
                    'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer,
                    'Blank':lambda x: b'\x00' * 4, 'Init':add_vfp_integer,
                    'Class':int, 'Empty':none, 'flags':('null', ),
                    },
            LOGICAL: {
                    'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical,
                    'Blank':lambda x: b'?', 'Init':add_logical,
                    'Class':bool, 'Empty':none, 'flags':('null', ),
                    },
            DATE: {
                    # NOTE(review): the blank for an 8-byte date field is normally
                    # eight spaces; this literal may have lost padding in the HTML
                    # rendering this chunk came from -- confirm against upstream.
                    'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date,
                    'Blank':lambda x: b' ', 'Init':add_date,
                    'Class':datetime.date, 'Empty':none, 'flags':('null', ),
                    },
            DATETIME: {
                    'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime,
                    'Blank':lambda x: b'\x00' * 8, 'Init':add_vfp_datetime,
                    'Class':datetime.datetime, 'Empty':none, 'flags':('null', ),
                    },
            MEMO: {
                    # memo fields store a 4-byte block pointer in the record itself
                    'Type':'Memo', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo,
                    'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_memo,
                    'Class':str, 'Empty':str, 'flags':('binary', 'nocptrans', 'null', ),
                    },
            GENERAL: {
                    'Type':'General', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo,
                    'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_binary_memo,
                    'Class':bytes, 'Empty':bytes, 'flags':('null', ),
                    },
            PICTURE: {
                    'Type':'Picture', 'Retrieve':retrieve_vfp_memo, 'Update':update_vfp_memo,
                    'Blank':lambda x: b'\x00\x00\x00\x00', 'Init':add_vfp_binary_memo,
                    'Class':bytes, 'Empty':bytes, 'flags':('null', ),
                    },
            _NULLFLAG: {
                    # hidden system field holding the per-record null bitmap
                    'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type,
                    'Blank':lambda x: b'\x00' * x, 'Init':int,
                    'Class':none, 'Empty':none, 'flags':('binary', 'system',),
                    } }
6181 6182 _memoext = '.fpt' 6183 _memoClass = _VfpMemo 6184 _yesMemoMask = 0x30 # 0011 0000 6185 _noMemoMask = 0x30 # 0011 0000 6186 # _binary_types = ('B', 'G', 'I', 'P', 'T', 'Y') 6187 _binary_types = (DOUBLE, GENERAL, INTEGER, MEMO, PICTURE, DATETIME, CURRENCY) 6188 # _character_types = ('C', 'D', 'F', 'L', 'M', 'N') # field representing character data 6189 _character_types = (CHAR, DATE, FLOAT, LOGICAL, MEMO, NUMERIC) 6190 _currency_types = (CURRENCY, ) 6191 _date_types = (DATE, DATETIME) 6192 _datetime_types = (DATETIME, ) 6193 # _fixed_types = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y') 6194 _fixed_types = (DOUBLE, DATE, GENERAL, INTEGER, LOGICAL, MEMO, PICTURE, DATETIME, CURRENCY) 6195 _logical_types = (LOGICAL, ) 6196 _memo_types = (GENERAL, MEMO, PICTURE) 6197 # _numeric_types = ('B', 'F', 'I', 'N', 'Y') 6198 _numeric_types = (DOUBLE, FLOAT, INTEGER, NUMERIC, CURRENCY) 6199 _variable_types = (CHAR, FLOAT, NUMERIC) 6200 _supported_tables = (0x30, 0x31) 6201 _dbfTableHeader = array('B', [0] * 32) 6202 _dbfTableHeader[0] = 0x30 # version - Foxpro 6 0011 0000 6203 _dbfTableHeader[8:10] = array('B', pack_short_int(33 + 263)) 6204 _dbfTableHeader[10] = 1 # record length -- one for delete flag 6205 _dbfTableHeader[29] = 3 # code page -- 437 US-MS DOS 6206 _dbfTableHeader = _dbfTableHeader.tobytes() 6207 _dbfTableHeaderExtra = b'\x00' * 263 6208
def _initialize_fields(self):
    """
    builds the FieldList of names, types, and descriptions

    Parses the raw 32-byte field descriptors stored in meta.header.fields,
    validates them, and rebuilds meta's per-field tuples.  Raises
    BadDataError on an unknown type, a duplicate name, or a record-length
    mismatch.  Field classes/empty types survive a re-read when the field
    type has not changed.
    """
    meta = self._meta
    # remember previous per-field class/empty settings so a reload keeps them
    old_fields = defaultdict(dict)
    for name in meta.fields:
        old_fields[name]['type'] = meta[name][TYPE]
        old_fields[name]['class'] = meta[name][CLASS]
        old_fields[name]['empty'] = meta[name][EMPTY]
    meta.fields[:] = []
    offset = 1                  # first byte of each record is the delete flag
    fieldsdef = meta.header.fields
    meta.nullflags = None
    total_length = meta.header.record_length
    for i in range(meta.header.field_count):
        # each on-disk field descriptor is exactly 32 bytes
        fieldblock = fieldsdef[i*32:(i+1)*32]
        name = self._meta.decoder(unpack_str(fieldblock[:11]))[0]
        type = fieldblock[11]           # single-byte field type code
        if not type in meta.fieldtypes:
            raise BadDataError("Unknown field type: %s" % type)
        start = unpack_long_int(fieldblock[12:16])      # offset of field within record
        length = fieldblock[16]
        offset += length
        end = start + length
        decimals = fieldblock[17]
        flags = fieldblock[18]
        if name in meta.fields:
            raise BadDataError('Duplicate field name found: %s' % name)
        meta.fields.append(name)
        # keep user-selected class/empty only if the field type is unchanged
        if name in old_fields and old_fields[name]['type'] == type:
            cls = old_fields[name]['class']
            empty = old_fields[name]['empty']
        else:
            cls = meta.fieldtypes[type]['Class']
            empty = meta.fieldtypes[type]['Empty']
        meta[name] = (
                type,
                start,
                length,
                end,
                decimals,
                flags,
                cls,
                empty,
                )
    # sum of field lengths (plus delete flag) must match the declared record length
    if offset != total_length:
        raise BadDataError("Header shows record length of %d, but calculated record length is %d" % (total_length, offset))
    # system fields (e.g. the null-flags field) are hidden from users
    meta.user_fields = [f for f in meta.fields if not meta[f][FLAGS] & SYSTEM]
    meta.user_field_count = len(meta.user_fields)
    Record._create_blank_data(meta)
class List(_Navigation):
    """
    list of Dbf records, with set-like behavior

    Each entry is internally a triple (table, recno, key_value); `_set`
    holds only the key values, so records with duplicate keys are excluded.
    `_tables` maps each contributing table to its pack count so that the
    list can detect when it has been invalidated by a pack.
    """

    _desc = ''

    def __init__(self, records=None, desc=None, key=None):
        """
        records:  iterable of records (or another List) to initialize from
        desc:     optional description, used by __repr__
        key:      callable(record) -> hashable; defaults to (table, recno)
        """
        self._list = []
        self._set = set()
        self._tables = dict()
        if key is not None:
            self.key = key
            if key.__doc__ is None:
                key.__doc__ = 'unknown'
        key = self.key
        self._current = -1
        if isinstance(records, self.__class__) and key is records.key:
            # same key function: cheap copy of the other List's state
            self._list = records._list[:]
            self._set = records._set.copy()
            self._current = 0
        elif records is not None:
            for record in records:
                value = key(record)
                item = (source_table(record), recno(record), value)
                if value not in self._set:
                    self._set.add(value)
                    self._list.append(item)
            self._current = 0
        if desc is not None:
            self._desc = desc

    def __add__(self, other):
        self._still_valid_check()
        key = self.key
        if isinstance(other, (Table, list)):
            other = self.__class__(other, key=key)
        if isinstance(other, self.__class__):
            other._still_valid_check()
            result = self.__class__()
            result._set = self._set.copy()
            result._list[:] = self._list[:]
            result._tables = {}
            result._tables.update(self._tables)
            result.key = self.key
            if key is other.key:        # same key?  just compare key values
                for item in other._list:
                    result._maybe_add(item)
            else:                       # different keys, use this list's key on other's records
                for rec in other:
                    result._maybe_add((source_table(rec), recno(rec), key(rec)))
            return result
        return NotImplemented

    def __contains__(self, data):
        self._still_valid_check()
        if not isinstance(data, (Record, RecordTemplate, tuple, dict)):
            # fixed message typo: 'templace' -> 'template'
            raise TypeError("%r is not a record, template, tuple, nor dict" % (data, ))
        try:    # attempt quick method
            item = self.key(data)
            if not isinstance(item, tuple):
                item = (item, )
            return item in self._set
        except Exception:   # argh, try brute force method
            for record in self:
                if record == data:
                    return True
            return False

    def __delitem__(self, key):
        self._still_valid_check()
        if isinstance(key, baseinteger):
            # BUG FIX: was `self._list.pop[key]` -- subscripting the bound
            # method instead of calling it, which raised TypeError
            item = self._list.pop(key)
            self._set.remove(item[2])
        elif isinstance(key, slice):
            self._set.difference_update([item[2] for item in self._list[key]])
            self._list.__delitem__(key)
        elif isinstance(key, (Record, RecordTemplate, dict, tuple)):
            index = self.index(key)
            # BUG FIX: was `self._list.pop[index]` (same subscript error)
            item = self._list.pop(index)
            self._set.remove(item[2])
        else:
            raise TypeError('%r should be an int, slice, record, template, tuple, or dict -- not a %r' % (key, type(key)))

    def __getitem__(self, key):
        self._still_valid_check()
        if isinstance(key, baseinteger):
            count = len(self._list)
            if not -count <= key < count:
                raise NotFoundError("Record %d is not in list." % key)
            return self._get_record(*self._list[key])
        elif isinstance(key, slice):
            result = self.__class__()
            result._list[:] = self._list[key]
            # BUG FIX: _set must hold the key values (item[2]), not the whole
            # (table, recno, value) triples; otherwise membership tests and
            # _maybe_add misbehave on the sliced copy
            result._set = set(item[2] for item in result._list)
            result.key = self.key
            return result
        elif isinstance(key, (Record, RecordTemplate, dict, tuple)):
            index = self.index(key)
            return self._get_record(*self._list[index])
        else:
            raise TypeError('%r should be an int, slice, record, record template, tuple, or dict -- not a %r' % (key, type(key)))

    def __iter__(self):
        self._still_valid_check()
        return Iter(self)

    def __len__(self):
        self._still_valid_check()
        return len(self._list)

    def __bool__(self):
        self._still_valid_check()
        return len(self) > 0

    def __radd__(self, other):
        self._still_valid_check()
        key = self.key
        if isinstance(other, (Table, list)):
            other = self.__class__(other, key=key)
        if isinstance(other, self.__class__):
            other._still_valid_check()
            result = other.__class__()
            result._set = other._set.copy()
            result._list[:] = other._list[:]
            result._tables = {}
            # BUG FIX: result starts as a copy of `other`, so its table
            # watch-list must come from other._tables (was self._tables,
            # leaving other's tables unmonitored by _still_valid_check)
            result._tables.update(other._tables)
            result.key = other.key
            if key is other.key:        # same key?  just compare key values
                for item in self._list:
                    result._maybe_add(item)
            else:                       # different keys, use this list's key on other's records
                for rec in self:
                    result._maybe_add((source_table(rec), recno(rec), key(rec)))
            return result
        return NotImplemented

    def __repr__(self):
        self._still_valid_check()
        if self._desc:
            return "%s(key=(%s), desc=%s)" % (self.__class__, self.key.__doc__, self._desc)
        else:
            return "%s(key=(%s))" % (self.__class__, self.key.__doc__)

    def __rsub__(self, other):
        self._still_valid_check()
        key = self.key
        if isinstance(other, (Table, list)):
            other = self.__class__(other, key=key)
        if isinstance(other, self.__class__):
            other._still_valid_check()
            result = other.__class__()
            result._list[:] = other._list[:]
            result._set = other._set.copy()
            result._tables = {}
            result._tables.update(other._tables)
            result.key = key
            lost = set()
            if key is other.key:
                for item in self._list:
                    # BUG FIX: membership was tested with `item[2] in
                    # result._list` (a key value against whole triples --
                    # always False); test against the key set, as __sub__ does
                    if item[2] in result._set:
                        result._set.remove(item[2])
                        lost.add(item[2])
            else:
                for rec in self:
                    value = key(rec)
                    if value in result._set:
                        result._set.remove(value)
                        lost.add(value)
            # filter by key value, mirroring __sub__
            result._list = [item for item in result._list if item[2] not in lost]
            # drop tables that no longer contribute any records
            lost = set(result._tables.keys())
            for table, _1, _2 in result._list:
                # BUG FIX: guard on `lost`, not result._tables -- a second
                # record from the same table made lost.remove raise KeyError
                if table in lost:
                    lost.remove(table)
                    if not lost:
                        break
            for table in lost:
                del result._tables[table]
            return result
        return NotImplemented

    def __sub__(self, other):
        self._still_valid_check()
        key = self.key
        if isinstance(other, (Table, list)):
            other = self.__class__(other, key=key)
        if isinstance(other, self.__class__):
            other._still_valid_check()
            result = self.__class__()
            result._list[:] = self._list[:]
            result._set = self._set.copy()
            result._tables = {}
            result._tables.update(self._tables)
            result.key = key
            lost = set()
            if key is other.key:
                for item in other._list:
                    if item[2] in result._set:
                        result._set.remove(item[2])
                        lost.add(item[2])
            else:
                for rec in other:
                    value = key(rec)
                    if value in result._set:
                        result._set.remove(value)
                        lost.add(value)
            result._list = [item for item in result._list if item[2] not in lost]
            # drop tables that no longer contribute any records
            lost = set(result._tables.keys())
            for table, _1, _2 in result._list:
                # BUG FIX: guard on `lost`, not result._tables (see __rsub__)
                if table in lost:
                    lost.remove(table)
                    if not lost:
                        break
            for table in lost:
                del result._tables[table]
            return result
        return NotImplemented

    def _maybe_add(self, item):
        """
        adds item (table, recno, key_value) if its key is not already present
        """
        self._still_valid_check()
        table, recno, key = item
        self._tables[table] = table._pack_count   # TODO: check that _pack_count is the same if already in table
        if key not in self._set:
            self._set.add(key)
            self._list.append(item)

    def _get_record(self, table=None, rec_no=None, value=None):
        # with no arguments, return the record at the current navigation index
        if table is rec_no is None:
            table, rec_no, value = self._list[self._index]
        return table[rec_no]

    def _purge(self, record, old_record_number, offset):
        """
        removes record from list, and adjusts the record numbers of the same
        table's subsequent entries by -offset; returns whether record was found
        """
        partial = source_table(record), old_record_number
        records = sorted(self._list, key=lambda item: (item[0], item[1]))
        # BUG FIX: an empty list left `item` unbound below (NameError)
        if not records:
            return False
        for item in records:
            if partial == item[:2]:
                found = True
                break
            elif partial[0] is item[0] and partial[1] < item[1]:
                found = False
                break
        else:
            found = False
        if found:
            self._list.pop(self._list.index(item))
            self._set.remove(item[2])
        start = records.index(item) + found
        for item in records[start:]:
            if item[0] is not partial[0]:               # into other table's records
                break
            i = self._list.index(item)
            self._set.remove(item[2])
            item = item[0], (item[1] - offset), item[2]
            self._list[i] = item
            self._set.add(item[2])
        return found

    def _still_valid_check(self):
        """
        raises DbfError if any contributing table has been packed since the
        list captured its records
        """
        for table, last_pack in self._tables.items():
            if last_pack != getattr(table, '_pack_count'):
                raise DbfError("table has been packed; list is invalid")

    _nav_check = _still_valid_check

    def append(self, record):
        self._still_valid_check()
        self._maybe_add((source_table(record), recno(record), self.key(record)))

    def clear(self):
        self._list = []
        self._set = set()
        self._index = -1
        self._tables.clear()

    def extend(self, records):
        self._still_valid_check()
        key = self.key
        if isinstance(records, self.__class__):
            if key is records.key:      # same key?  just compare key values
                for item in records._list:
                    self._maybe_add(item)
            else:                       # different keys, use this list's key on other's records
                for rec in records:
                    value = key(rec)
                    self._maybe_add((source_table(rec), recno(rec), value))
        else:
            for rec in records:
                value = key(rec)
                self._maybe_add((source_table(rec), recno(rec), value))

    def index(self, record, start=None, stop=None):
        """
        returns the index of record between start and stop
        start and stop default to the first and last record
        """
        if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
            raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
        self._still_valid_check()
        if start is None:
            start = 0
        if stop is None:
            stop = len(self)
        for i in range(start, stop):
            if record == (self[i]):
                return i
        else:
            raise NotFoundError("dbf.List.index(x): x not in List", data=record)

    def insert(self, i, record):
        self._still_valid_check()
        item = source_table(record), recno(record), self.key(record)
        # BUG FIX: membership is tracked by key value (item[2]); testing the
        # whole triple against the key set was always False, allowing
        # duplicate keys to slip in
        if item[2] not in self._set:
            self._set.add(item[2])
            self._list.insert(i, item)

    def key(self, record):
        """
        table_name, record_number
        """
        self._still_valid_check()
        return source_table(record), recno(record)

    def pop(self, index=None):
        self._still_valid_check()
        if index is None:
            table, recno, value = self._list.pop()
        else:
            table, recno, value = self._list.pop(index)
        self._set.remove(value)
        return self._get_record(table, recno, value)

    def query(self, criteria):
        """
        criteria is a callback that returns a truthy value for matching record
        """
        return pql(self, criteria)

    def remove(self, data):
        self._still_valid_check()
        if not isinstance(data, (Record, RecordTemplate, dict, tuple)):
            raise TypeError("%r(%r) is not a record, template, tuple, nor dict" % (type(data), data))
        index = self.index(data)
        record = self[index]
        item = source_table(record), recno(record), self.key(record)
        self._list.remove(item)
        self._set.remove(item[2])

    def reverse(self):
        self._still_valid_check()
        return self._list.reverse()

    def sort(self, key=None, reverse=False):
        self._still_valid_check()
        if key is None:
            return self._list.sort(reverse=reverse)
        # item[0][item[1]] dereferences (table, recno) back into a record
        return self._list.sort(key=lambda item: key(item[0][item[1]]), reverse=reverse)
class Index(_Navigation):
    """
    non-persistent index for a table

    Maintains `_values` (sorted key tuples), `_rec_by_val` (record numbers in
    the same order), and `_records` (record number -> key tuple) so lookups
    use bisection.
    """

    def __init__(self, table, key):
        self._table = table
        self._values = []             # ordered list of values
        self._rec_by_val = []         # matching record numbers
        self._records = {}            # record numbers:values
        self.__doc__ = key.__doc__ or 'unknown'
        self._key = key               # NOTE: shadows the _key() method below
        self._previous_status = []
        for record in table:
            value = key(record)
            if value is DoNotIndex:
                continue
            rec_num = recno(record)
            if not isinstance(value, tuple):
                value = (value, )
            vindex = bisect_right(self._values, value)
            self._values.insert(vindex, value)
            self._rec_by_val.insert(vindex, rec_num)
            self._records[rec_num] = value
        table._indexen.add(self)

    def __call__(self, record):
        """
        (re)indexes a single record after it has changed
        """
        rec_num = recno(record)
        key = self.key(record)
        if rec_num in self._records:
            if self._records[rec_num] == key:
                return
            # remove the stale entry before inserting the new one
            old_key = self._records[rec_num]
            vindex = bisect_left(self._values, old_key)
            self._values.pop(vindex)
            self._rec_by_val.pop(vindex)
            del self._records[rec_num]
            assert rec_num not in self._records
        if key == (DoNotIndex, ):
            return
        vindex = bisect_right(self._values, key)
        self._values.insert(vindex, key)
        self._rec_by_val.insert(vindex, rec_num)
        self._records[rec_num] = key

    def __contains__(self, data):
        if not isinstance(data, (Record, RecordTemplate, tuple, dict)):
            # fixed message typo: 'templace' -> 'template'
            raise TypeError("%r is not a record, template, tuple, nor dict" % (data, ))
        try:
            value = self.key(data)
            return value in self._values
        except Exception:
            for record in self:
                if record == data:
                    return True
            return False

    def __getitem__(self, key):
        '''if key is an integer, returns the matching record;
        if key is a [slice | string | tuple | record] returns a List;
        raises NotFoundError on failure'''
        if isinstance(key, baseinteger):
            count = len(self._values)
            if not -count <= key < count:
                raise NotFoundError("Record %d is not in list." % key)
            rec_num = self._rec_by_val[key]
            return self._table[rec_num]
        elif isinstance(key, slice):
            result = List()
            start, stop, step = key.start, key.stop, key.step
            if start is None: start = 0
            if stop is None: stop = len(self._rec_by_val)
            if step is None: step = 1
            if step < 0:
                # NOTE(review): this reversed-slice arithmetic looks suspect --
                # the computed range can run into negative positions that wrap
                # around _rec_by_val; confirm before relying on negative steps
                start, stop = stop - 1, -(stop - start + 1)
            for loc in range(start, stop, step):
                record = self._table[self._rec_by_val[loc]]
                result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
            return result
        elif isinstance (key, (basestring, tuple, Record, RecordTemplate)):
            if isinstance(key, (Record, RecordTemplate)):
                key = self.key(key)
            elif isinstance(key, basestring):
                key = (key, )
            lo = self._search(key, where='left')
            hi = self._search(key, where='right')
            if lo == hi:
                raise NotFoundError(key)
            result = List(desc='match = %r' % (key, ))
            for loc in range(lo, hi):
                record = self._table[self._rec_by_val[loc]]
                result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
            return result
        else:
            raise TypeError('indices must be integers, match objects must by strings or tuples')

    def __enter__(self):
        self._table.__enter__()
        return self

    def __exit__(self, *exc_info):
        self._table.__exit__()
        return False

    def __iter__(self):
        return Iter(self)

    def __len__(self):
        return len(self._records)

    def _clear(self):
        """
        removes all entries from index
        """
        self._values[:] = []
        self._rec_by_val[:] = []
        self._records.clear()

    def _key(self, record):
        """
        table_name, record_number

        Default key; in practice shadowed by the instance attribute assigned
        in __init__, so this body only runs if that assignment is removed.
        """
        # BUG FIX: Index has no _still_valid_check (that is List's validity
        # hook) -- calling it raised AttributeError; use _nav_check instead
        self._nav_check()
        return source_table(record), recno(record)

    def _nav_check(self):
        """
        raises error if table is closed
        """
        if self._table._meta.status == CLOSED:
            # BUG FIX: Index has no `filename` attribute -- report the
            # underlying table's filename
            raise DbfError('indexed table %s is closed' % self._table.filename)

    def _partial_match(self, target, match):
        # true when `target` starts with `match`, allowing a prefix match on
        # the final (string) element
        target = target[:len(match)]
        if isinstance(match[-1], basestring):
            target = list(target)
            target[-1] = target[-1][:len(match[-1])]
            target = tuple(target)
        return target == match

    def _purge(self, rec_num):
        # drop rec_num from the index, if present
        value = self._records.get(rec_num)
        if value is not None:
            vindex = bisect_left(self._values, value)
            del self._records[rec_num]
            self._values.pop(vindex)
            self._rec_by_val.pop(vindex)

    def _reindex(self):
        """
        reindexes all records
        """
        for record in self._table:
            self(record)

    def _search(self, match, lo=0, hi=None, where=None):
        # bisect into the sorted key list; `where` selects the insertion side
        if hi is None:
            hi = len(self._values)
        if where == 'left':
            return bisect_left(self._values, match, lo, hi)
        elif where == 'right':
            return bisect_right(self._values, match, lo, hi)

    def index(self, record, start=None, stop=None):
        """
        returns the index of record between start and stop
        start and stop default to the first and last record
        """
        if not isinstance(record, (Record, RecordTemplate, dict, tuple)):
            raise TypeError("x should be a record, template, dict, or tuple, not %r" % type(record))
        self._nav_check()
        if start is None:
            start = 0
        if stop is None:
            stop = len(self)
        for i in range(start, stop):
            if record == (self[i]):
                return i
        else:
            raise NotFoundError("dbf.Index.index(x): x not in Index", data=record)

    def index_search(self, match, start=None, stop=None, nearest=False, partial=False):
        """
        returns the index of match between start and stop
        start and stop default to the first and last record.
        if nearest is true returns the location of where the match should be
        otherwise raises NotFoundError
        """
        self._nav_check()
        if not isinstance(match, tuple):
            match = (match, )
        if start is None:
            start = 0
        if stop is None:
            stop = len(self)
        loc = self._search(match, start, stop, where='left')
        if loc == len(self._values):
            if nearest:
                return IndexLocation(loc, False)
            raise NotFoundError("dbf.Index.index_search(x): x not in index", data=match)
        if self._values[loc] == match \
        or partial and self._partial_match(self._values[loc], match):
            return IndexLocation(loc, True)
        elif nearest:
            return IndexLocation(loc, False)
        else:
            raise NotFoundError("dbf.Index.index_search(x): x not in Index", data=match)

    def key(self, record):
        # normalize the user key function's result to a tuple
        result = self._key(record)
        if not isinstance(result, tuple):
            result = (result, )
        return result

    def query(self, criteria):
        """
        criteria is a callback that returns a truthy value for matching record
        """
        self._nav_check()
        return pql(self, criteria)

    def search(self, match, partial=False):
        """
        returns dbf.List of all (partially) matching records
        """
        self._nav_check()
        result = List()
        if not isinstance(match, tuple):
            match = (match, )
        loc = self._search(match, where='left')
        if loc == len(self._values):
            return result
        while loc < len(self._values) and self._values[loc] == match:
            record = self._table[self._rec_by_val[loc]]
            result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
            loc += 1
        if partial:
            while loc < len(self._values) and self._partial_match(self._values[loc], match):
                record = self._table[self._rec_by_val[loc]]
                result._maybe_add(item=(self._table, self._rec_by_val[loc], result.key(record)))
                loc += 1
        return result
class Relation(object):
    """
    establishes a relation between two dbf tables (not persistent)

    Instances are cached in the class-level `relations` dict, keyed by
    ((src_table, src_field), (tgt_table, tgt_field)), so constructing the
    same relation twice returns the same object.
    """

    relations = {}

    def __new__(cls, src, tgt, src_names=None, tgt_names=None):
        """
        src, tgt:             (table, field) pairs; field may be a name or an index
        src_names, tgt_names: optional ("table", "field") display-name overrides
        """
        if (len(src) != 2 or len(tgt) != 2):
            raise DbfError("Relation should be called with ((src_table, src_field), (tgt_table, tgt_field))")
        if src_names and len(src_names) != 2 or tgt_names and len(tgt_names) != 2:
            raise DbfError('src_names and tgt_names, if specified, must be ("table","field")')
        src_table, src_field = src
        tgt_table, tgt_field = tgt
        try:
            # BUG FIX: `table`/`field` are now assigned on every path before
            # the lookups; previously a failed string-name lookup reached the
            # handler with them unbound, turning the DbfError into a NameError
            table, field = src_table, src_field
            if isinstance(src_field, baseinteger):
                src_field = table.field_names[field]
            else:
                table.field_names.index(field)
            table, field = tgt_table, tgt_field
            if isinstance(tgt_field, baseinteger):
                tgt_field = table.field_names[field]
            else:
                table.field_names.index(field)
        except (IndexError, ValueError):
            raise DbfError('%r not in %r' % (field, table)) from None
        if src_names:
            src_table_name, src_field_name = src_names
        else:
            src_table_name, src_field_name = src_table.filename, src_field
            if src_table_name[-4:].lower() == '.dbf':
                src_table_name = src_table_name[:-4]
        if tgt_names:
            tgt_table_name, tgt_field_name = tgt_names
        else:
            tgt_table_name, tgt_field_name = tgt_table.filename, tgt_field
            if tgt_table_name[-4:].lower() == '.dbf':
                tgt_table_name = tgt_table_name[:-4]
        # reuse a previously created, identical relation
        relation = cls.relations.get(((src_table, src_field), (tgt_table, tgt_field)))
        if relation is not None:
            return relation
        obj = object.__new__(cls)
        obj._src_table, obj._src_field = src_table, src_field
        obj._tgt_table, obj._tgt_field = tgt_table, tgt_field
        obj._src_table_name, obj._src_field_name = src_table_name, src_field_name
        obj._tgt_table_name, obj._tgt_field_name = tgt_table_name, tgt_field_name
        obj._tables = dict()
        cls.relations[((src_table, src_field), (tgt_table, tgt_field))] = obj
        return obj

    def __eq__(self, other):
        # guard added: comparing against a non-Relation used to raise
        # AttributeError instead of falling back to NotImplemented
        if not isinstance(other, Relation):
            return NotImplemented
        return (self.src_table == other.src_table
            and self.src_field == other.src_field
            and self.tgt_table == other.tgt_table
            and self.tgt_field == other.tgt_field)

    def __getitem__(self, record):
        """
        record should be from the source table
        """
        key = (record[self._src_field], )
        try:
            return self.index[key]
        except NotFoundError:
            return List(desc='%s not found' % key)

    def __hash__(self):
        return hash((self.src_table, self.src_field, self.tgt_table, self.tgt_field))

    def __ne__(self, other):
        if not isinstance(other, Relation):
            return NotImplemented
        return (self.src_table != other.src_table
            or self.src_field != other.src_field
            or self.tgt_table != other.tgt_table
            or self.tgt_field != other.tgt_field)

    def __repr__(self):
        return "Relation((%r, %r), (%r, %r))" % (self.src_table_name, self.src_field, self.tgt_table_name, self.tgt_field)

    def __str__(self):
        return "%s:%s --> %s:%s" % (self.src_table_name, self.src_field_name, self.tgt_table_name, self.tgt_field_name)

    @property
    def src_table(self):
        "the source table"
        return self._src_table

    @property
    def src_field(self):
        "name of source field"
        return self._src_field

    @property
    def src_table_name(self):
        "display name of source table"
        return self._src_table_name

    @property
    def src_field_name(self):
        "display name of source field"
        return self._src_field_name

    @property
    def tgt_table(self):
        "the target table"
        return self._tgt_table

    @property
    def tgt_field(self):
        "name of target field"
        return self._tgt_field

    @property
    def tgt_table_name(self):
        "display name of target table"
        return self._tgt_table_name

    @property
    def tgt_field_name(self):
        "display name of target field"
        return self._tgt_field_name

    @LazyAttr
    def index(self):
        # built on first access; LazyAttr lets this assignment replace the
        # descriptor with the computed index.  Also classifies each side of
        # the relation as 'one' or 'many' in self._tables.
        def index(record, field=self._tgt_field):
            return record[field]
        index.__doc__ = "%s:%s --> %s:%s" % (self.src_table_name, self.src_field_name, self.tgt_table_name, self.tgt_field_name)
        self.index = self._tgt_table.create_index(index)
        # a List de-duplicates by key, so a shorter List means repeated values
        source = dbf.List(self._src_table, key=lambda rec, field=self._src_field: rec[field])
        target = dbf.List(self._tgt_table, key=lambda rec, field=self._tgt_field: rec[field])
        if len(source) != len(self._src_table):
            self._tables[self._src_table] = 'many'
        else:
            self._tables[self._src_table] = 'one'
        if len(target) != len(self._tgt_table):
            self._tables[self._tgt_table] = 'many'
        else:
            self._tables[self._tgt_table] = 'one'
        return self.index

    def one_or_many(self, table):
        """
        returns 'one' or 'many' for *table*'s side of the relation;
        table may be a Table object or a table name
        """
        self.index    # make sure self._tables has been populated
        try:
            if isinstance(table, basestring):
                table = (self._src_table, self._tgt_table)[self._tgt_table_name == table]
            # BUG FIX: an unknown table raises KeyError from the dict lookup,
            # which the old `except IndexError` never caught
            return self._tables[table]
        except (KeyError, IndexError):
            raise NotFoundError("table %s not in relation" % table) from None
class IndexFile(_Navigation):
    # placeholder for persistent (on-disk) index support; not implemented
    pass


# table meta

# registry of concrete Table implementations, keyed by version abbreviation
table_types = {
    'db3' : Db3Table,
    'clp' : ClpTable,
    'fp'  : FpTable,
    'vfp' : VfpTable,
    }

# dbf header version byte -> human-readable product name
version_map = {
        0x02 : 'FoxBASE',
        0x03 : 'dBase III Plus',
        0x04 : 'dBase IV',
        0x05 : 'dBase V',
        0x30 : 'Visual FoxPro',
        0x31 : 'Visual FoxPro (auto increment field)',
        0x32 : 'Visual FoxPro (VarChar, VarBinary, or BLOB enabled)',
        0x43 : 'dBase IV SQL table files',
        0x63 : 'dBase IV SQL system files',
        0x83 : 'dBase III Plus w/memos',
        0x8b : 'dBase IV w/memos',
        0x8e : 'dBase IV w/SQL table',
        0xf5 : 'FoxPro w/memos'}

# dbf language-driver byte -> (python codec name or None, description);
# a None codec means the code page is recognized but not supported
code_pages = {
        0x00 : ('ascii', "plain ol' ascii"),
        0x01 : ('cp437', 'U.S. MS-DOS'),
        0x02 : ('cp850', 'International MS-DOS'),
        0x03 : ('cp1252', 'Windows ANSI'),
        0x04 : ('mac_roman', 'Standard Macintosh'),
        0x08 : ('cp865', 'Danish OEM'),
        0x09 : ('cp437', 'Dutch OEM'),
        0x0A : ('cp850', 'Dutch OEM (secondary)'),
        0x0B : ('cp437', 'Finnish OEM'),
        0x0D : ('cp437', 'French OEM'),
        0x0E : ('cp850', 'French OEM (secondary)'),
        0x0F : ('cp437', 'German OEM'),
        0x10 : ('cp850', 'German OEM (secondary)'),
        0x11 : ('cp437', 'Italian OEM'),
        0x12 : ('cp850', 'Italian OEM (secondary)'),
        0x13 : ('cp932', 'Japanese Shift-JIS'),
        0x14 : ('cp850', 'Spanish OEM (secondary)'),
        0x15 : ('cp437', 'Swedish OEM'),
        0x16 : ('cp850', 'Swedish OEM (secondary)'),
        0x17 : ('cp865', 'Norwegian OEM'),
        0x18 : ('cp437', 'Spanish OEM'),
        0x19 : ('cp437', 'English OEM (Britain)'),
        0x1A : ('cp850', 'English OEM (Britain) (secondary)'),
        0x1B : ('cp437', 'English OEM (U.S.)'),
        0x1C : ('cp863', 'French OEM (Canada)'),
        0x1D : ('cp850', 'French OEM (secondary)'),
        0x1F : ('cp852', 'Czech OEM'),
        0x22 : ('cp852', 'Hungarian OEM'),
        0x23 : ('cp852', 'Polish OEM'),
        0x24 : ('cp860', 'Portugese OEM'),
        0x25 : ('cp850', 'Potugese OEM (secondary)'),   # (sic) -- description kept byte-identical
        0x26 : ('cp866', 'Russian OEM'),
        0x37 : ('cp850', 'English OEM (U.S.) (secondary)'),
        0x40 : ('cp852', 'Romanian OEM'),
        0x4D : ('cp936', 'Chinese GBK (PRC)'),
        0x4E : ('cp949', 'Korean (ANSI/OEM)'),
        0x4F : ('cp950', 'Chinese Big 5 (Taiwan)'),
        0x50 : ('cp874', 'Thai (ANSI/OEM)'),
        0x57 : ('cp1252', 'ANSI'),
        0x58 : ('cp1252', 'Western European ANSI'),
        0x59 : ('cp1252', 'Spanish ANSI'),
        0x64 : ('cp852', 'Eastern European MS-DOS'),
        0x65 : ('cp866', 'Russian MS-DOS'),
        0x66 : ('cp865', 'Nordic MS-DOS'),
        0x67 : ('cp861', 'Icelandic MS-DOS'),
        0x68 : (None, 'Kamenicky (Czech) MS-DOS'),
        0x69 : (None, 'Mazovia (Polish) MS-DOS'),
        0x6a : ('cp737', 'Greek MS-DOS (437G)'),
        0x6b : ('cp857', 'Turkish MS-DOS'),
        0x78 : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
        0x79 : ('cp949', 'Korean Windows'),
        0x7a : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
        0x7b : ('cp932', 'Japanese Windows'),
        0x7c : ('cp874', 'Thai Windows'),
        0x7d : ('cp1255', 'Hebrew Windows'),
        0x7e : ('cp1256', 'Arabic Windows'),
        0xc8 : ('cp1250', 'Eastern European Windows'),
        0xc9 : ('cp1251', 'Russian Windows'),
        0xca : ('cp1254', 'Turkish Windows'),
        0xcb : ('cp1253', 'Greek Windows'),
        0x96 : ('mac_cyrillic', 'Russian Macintosh'),
        0x97 : ('mac_latin2', 'Macintosh EE'),
        0x98 : ('mac_greek', 'Greek Macintosh'),
        0xf0 : ('utf8', '8-bit unicode'),
        }


# resolve the module-level default_codepage (assigned earlier in the file,
# possibly as a codec name or description) to a plain codec name, falling
# back to ascii (code page 0x00)
default_codepage = code_pages.get(default_codepage, code_pages.get(0x00))[0]
7111 7112 7113 -def _nop(value):
7114 """ 7115 returns parameter unchanged 7116 """ 7117 return value
7118
7119 -def _normalize_tuples(tuples, length, filler):
7120 """ 7121 ensures each tuple is the same length, using filler[-missing] for the gaps 7122 """ 7123 final = [] 7124 for t in tuples: 7125 if len(t) < length: 7126 final.append( tuple([item for item in t] + filler[len(t)-length:]) ) 7127 else: 7128 final.append(t) 7129 return tuple(final)
7130
def _codepage_lookup(cp):
    """
    resolves cp (code-page number, codec name, or description) to the
    triple (number, codec name, description)

    raises DbfError for unknown codepages, or for recognized pages whose
    codec entry is None (i.e. not supported by Python)
    """
    if cp not in code_pages:
        # not a number -- try to match against codec names / descriptions
        for number in sorted(code_pages):
            codec, description = code_pages[number]
            if cp in (codec, description):
                if codec is None:
                    raise DbfError("Unsupported codepage: %s" % description)
                cp = number
                break
        else:
            raise DbfError("Unsupported codepage: %s" % cp)
    codec, description = code_pages[cp]
    return cp, codec, description
# miscellany

class _Db4Table(Table):
    """
    under development -- dBase IV w/memos support (not yet functional)
    """

    version = 'dBase IV w/memos (non-functional)'
    _versionabbr = 'db4'

    @MutableDefault
    def _field_types():
        # field-type code -> implementation details: display name,
        # retrieve/update converters, blank-value factory, field-adder
        return {
            CHAR: {'Type':'Character', 'Retrieve':retrieve_character, 'Update':update_character, 'Blank':lambda x: b' ' * x, 'Init':add_vfp_character},
            CURRENCY: {'Type':'Currency', 'Retrieve':retrieve_currency, 'Update':update_currency, 'Blank':Decimal, 'Init':add_vfp_currency},
            DOUBLE: {'Type':'Double', 'Retrieve':retrieve_double, 'Update':update_double, 'Blank':float, 'Init':add_vfp_double},
            FLOAT: {'Type':'Float', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':float, 'Init':add_vfp_numeric},
            NUMERIC: {'Type':'Numeric', 'Retrieve':retrieve_numeric, 'Update':update_numeric, 'Blank':int, 'Init':add_vfp_numeric},
            INTEGER: {'Type':'Integer', 'Retrieve':retrieve_integer, 'Update':update_integer, 'Blank':int, 'Init':add_vfp_integer},
            LOGICAL: {'Type':'Logical', 'Retrieve':retrieve_logical, 'Update':update_logical, 'Blank':Logical, 'Init':add_logical},
            DATE: {'Type':'Date', 'Retrieve':retrieve_date, 'Update':update_date, 'Blank':Date, 'Init':add_date},
            DATETIME: {'Type':'DateTime', 'Retrieve':retrieve_vfp_datetime, 'Update':update_vfp_datetime, 'Blank':DateTime, 'Init':add_vfp_datetime},
            MEMO: {'Type':'Memo', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo},
            GENERAL: {'Type':'General', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo},
            PICTURE: {'Type':'Picture', 'Retrieve':retrieve_memo, 'Update':update_memo, 'Blank':lambda x: b' ' * x, 'Init':add_memo},
            _NULLFLAG: {'Type':'_NullFlags', 'Retrieve':unsupported_type, 'Update':unsupported_type, 'Blank':int, 'Init':None} }

    _memoext = '.dbt'
    _memotypes = ('G', 'M', 'P')
    _memoClass = _VfpMemo
    _yesMemoMask = 0x8b              # 1000 1011 -- version byte when memos present
    _noMemoMask = 0x04               # 0000 0100 -- version byte when memos absent
    _fixed_fields = ('B', 'D', 'G', 'I', 'L', 'M', 'P', 'T', 'Y')
    _variable_fields = ('C', 'F', 'N')
    _binary_fields = ('G', 'P')
    _character_fields = ('C', 'M')   # field representing character data
    _decimal_fields = ('F', 'N')
    _numeric_fields = ('B', 'F', 'I', 'N', 'Y')
    _currency_fields = ('Y',)
    _supported_tables = (0x04, 0x8b)
    _dbfTableHeader = [0] * 32
    _dbfTableHeader[0] = 0x8b        # version byte -- dBase IV w/memos (see version_map)
    _dbfTableHeader[10] = 0x01       # record length -- one for delete flag
    _dbfTableHeader[29] = 0x03       # code page -- 0x03 is cp1252 / Windows ANSI (see code_pages)
    _dbfTableHeader = bytes(_dbfTableHeader)
    _dbfTableHeaderExtra = b''

    def _check_memo_integrity(self):
        """
        dBase IV specific -- attach the memo file and verify the header's
        memo flag agrees with the actual field types / memo file on disk
        """
        if self._meta.header.version == 0x8b:
            try:
                self._meta.memo = self._memoClass(self._meta)
            except:
                # memo file unusable -- close the data file before bailing
                self._meta.dfd.close()
                self._meta.dfd = None
                raise
        if not self._meta.ignorememos:
            for field in self._meta.fields:
                if self._meta[field][TYPE] in self._memotypes:
                    if self._meta.header.version != 0x8b:
                        self._meta.dfd.close()
                        self._meta.dfd = None
                        raise BadDataError("Table structure corrupt: memo fields exist, header declares no memos")
                    elif not os.path.exists(self._meta.memoname):
                        self._meta.dfd.close()
                        self._meta.dfd = None
                        raise BadDataError("Table structure corrupt: memo fields exist without memo file")
                    # one memo field is enough to validate the table
                    break
# utility functions

def create_template(table_or_record, defaults=None):
    """
    returns a RecordTemplate built from a table (blank) or an existing record
    """
    if isinstance(table_or_record, Table):
        return RecordTemplate(table_or_record._meta, defaults)
    return RecordTemplate(table_or_record._meta, table_or_record, defaults)
def delete(record):
    """
    marks record as deleted
    """
    is_template = isinstance(record, RecordTemplate)
    if not is_template and record._meta.status == CLOSED:
        raise DbfError("%s is closed; cannot delete record" % record._meta.filename)
    in_flux = not record._write_to_disk
    # only real records that are not already in flux get the flux dance
    guard = not is_template and not in_flux
    if guard:
        record._start_flux()
    try:
        record._data[0] = ASTERISK
        if not is_template:
            record._dirty = True
    except:
        if guard:
            record._rollback_flux()
        raise
    if guard:
        record._commit_flux()
def export(table_or_records, filename=None, field_names=None, format='csv', header=True, dialect='dbf', encoding=None):
    """
    writes the records using CSV or tab-delimited format, using the filename
    given if specified, otherwise the table name
    if table_or_records is a collection of records (not an actual table) they
    should all be of the same format

    format must be 'csv', 'tab', or 'fixed'; 'fixed' also writes a companion
    "<name>_layout.txt" file describing each field's width
    returns the number of records written
    """
    table = source_table(table_or_records[0])
    if filename is None:
        filename = table.filename
    if field_names is None:
        field_names = table.field_names
    if isinstance(field_names, basestring):
        field_names = [f.strip() for f in field_names.split(',')]
    format = format.lower()
    if format not in ('csv', 'tab', 'fixed'):
        raise DbfError("export format: csv, tab, or fixed -- not %s" % format)
    if format == 'fixed':
        format = 'txt'
    if encoding is None:
        encoding = table.codepage.name
    header_names = field_names
    base, ext = os.path.splitext(filename)
    if ext.lower() in ('', '.dbf'):
        filename = base + "." + format
    # initialize fd before the try block -- otherwise a failed open() would
    # make the finally clause raise UnboundLocalError, masking the real error
    fd = None
    try:
        if format == 'csv':
            fd = open(filename, 'w', encoding=encoding)
            csvfile = csv.writer(fd, dialect=dialect)
            if header:
                csvfile.writerow(header_names)
            for record in table_or_records:
                csvfile.writerow([record[fieldname] for fieldname in field_names])
        elif format == 'tab':
            fd = open(filename, 'w', encoding=encoding)
            if header:
                fd.write('\t'.join(header_names) + '\n')
            for record in table_or_records:
                fd.write('\t'.join(str(record[fieldname]) for fieldname in field_names) + '\n')
        else:   # format == 'txt' (fixed-width)
            fd = open(filename, 'w', encoding=encoding)
            sizes = []
            # companion layout file documents each field's width; the with
            # statement guarantees it is closed even if field_info() raises
            with open("%s_layout.txt" % os.path.splitext(filename)[0], 'w', encoding=encoding) as layout:
                layout.write("%-15s Size\n" % "Field Name")
                layout.write("%-15s ----\n" % ("-" * 15))
                for field in field_names:
                    size = table.field_info(field).length
                    sizes.append(size)
                    layout.write("%-15s %3d\n" % (field, size))
                layout.write('\nTotal Records in file: %d\n' % len(table_or_records))
            for record in table_or_records:
                fields = []
                for i, fieldname in enumerate(field_names):
                    # left-justify each value to its declared field width
                    fields.append("%-*s" % (sizes[i], record[fieldname]))
                fd.write(''.join(fields) + '\n')
    finally:
        if fd is not None:
            fd.close()
            fd = None
    return len(table_or_records)
def field_names(thing):
    """
    fields in table/record, keys in dict
    """
    if isinstance(thing, dict):
        return list(thing.keys())
    if isinstance(thing, (Table, Record, RecordTemplate)):
        return thing._meta.user_fields[:]
    if isinstance(thing, Index):
        return thing._table._meta.user_fields[:]
    # otherwise assume an iterable of records -- any one record will do
    for record in thing:
        return record._meta.user_fields[:]
def is_deleted(record):
    """
    True when the record carries the deleted marker
    """
    flag = record._data[0]
    return flag == ASTERISK
def recno(record):
    """
    physical record number of record within its table
    """
    position = record._recnum
    return position
def reset(record, keep_fields=None):
    """
    sets record's fields back to original, except for fields in keep_fields
    """
    is_template = isinstance(record, RecordTemplate)
    in_flux = False
    if not is_template:
        in_flux = not record._write_to_disk
        if record._meta.status == CLOSED:
            raise DbfError("%s is closed; cannot modify record" % record._meta.filename)
    if keep_fields is None:
        keep_fields = []
    # stash the values to survive the blanking
    saved = {field: record[field] for field in keep_fields}
    record._data[:] = record._meta.blankrecord[:]
    for field in keep_fields:
        record[field] = saved[field]
    if not is_template:
        if record._write_to_disk:
            record._write()
        else:
            record._dirty = True
def source_table(thingie):
    """
    returns the table behind a table, record, or index
    """
    result = thingie._meta.table()
    if result is None:
        # the weakref has gone stale
        raise DbfError("table is no longer available")
    return result
def undelete(record):
    """
    marks record as active
    """
    is_template = isinstance(record, RecordTemplate)
    if not is_template and record._meta.status == CLOSED:
        raise DbfError("%s is closed; cannot undelete record" % record._meta.filename)
    in_flux = not record._write_to_disk
    # only real records that are not already in flux get the flux dance
    guard = not is_template and not in_flux
    if guard:
        record._start_flux()
    try:
        record._data[0] = SPACE
        if not is_template:
            record._dirty = True
    except:
        if guard:
            record._rollback_flux()
        raise
    if guard:
        record._commit_flux()
def write(record, **kwargs):
    """
    write record data to disk (updates indices); kwargs are gathered
    into the record first
    """
    if record._meta.status == CLOSED:
        raise DbfError("%s is closed; cannot update record" % record._meta.filename)
    if not record._write_to_disk:
        raise DbfError("unable to use .write_record() while record is in flux")
    if kwargs:
        gather(record, kwargs)
    if record._dirty:
        record._write()
def Process(records, start=0, stop=None, filter=None):
    """
    commits each record to disk before returning the next one; undoes all
    changes to that record if exception raised

    if records is a table, it will be opened and closed if necessary
    filter function should return True to skip record, False to keep
    """
    already_open = True
    if isinstance(records, Table):
        already_open = records.status != CLOSED
        if not already_open:
            records.open()
    try:
        if stop is None:
            stop = len(records)
        for record in records[start:stop]:
            if filter is not None and filter(record):
                continue
            try:
                # put the record in flux so the consumer's edits are buffered
                record._start_flux()
                yield record
            except:
                # an exception thrown into the generator at the yield point
                # (including one raised by the consumer) rolls the record back
                record._rollback_flux()
                raise
            else:
                record._commit_flux()
    finally:
        # only close the table if we were the ones who opened it
        if not already_open:
            records.close()
def Templates(records, start=0, stop=None, filter=None):
    """
    returns a template of each record instead of the record itself
    if records is a table, it will be opened and closed if necessary
    """
    must_close = False
    if isinstance(records, Table) and records.status == CLOSED:
        must_close = True
        records.open()
    try:
        if stop is None:
            stop = len(records)
        for record in records[start:stop]:
            if filter is not None and filter(record):
                continue
            yield create_template(record)
    finally:
        # only close the table if we were the ones who opened it
        if must_close:
            records.close()
def index(sequence):
    """
    yields the integers 0 .. len(sequence)-1
    """
    yield from range(len(sequence))
def guess_table_type(filename):
    """
    returns a list of (abbreviation, version-name, class) tuples for every
    table class able to read filename; raises DbfError if none can
    """
    reported = table_type(filename)
    version = reported[0]
    candidates = [
        (cls._versionabbr, cls._version, cls)
        for cls in (Db3Table, ClpTable, FpTable, VfpTable)
        if version in cls._supported_tables
        ]
    if not candidates:
        raise DbfError("Tables of type %s not supported" % str(reported))
    return candidates
def table_type(filename):
    """
    returns (version_byte, description) for a table's dbf version

    raises DbfError if the file is missing, empty, or of unknown type
    """
    base, ext = os.path.splitext(filename)
    if ext == '':
        # accept any casing of the .dbf extension on disk
        filename = base + '.[Dd][Bb][Ff]'
    matches = glob(filename)
    if matches:
        filename = matches[0]
    else:
        filename = base + '.dbf'
    if not os.path.exists(filename):
        raise DbfError('File %s not found' % filename)
    with open(filename, 'rb') as fd:
        version = fd.read(1)
    if not version:
        # zero-length file: there is no version byte to inspect; the original
        # code fell through and crashed with TypeError on the %x format below
        raise DbfError("File %s is empty -- not a dbf table" % filename)
    [version] = version
    if version not in version_map:
        raise DbfError("Unknown dbf type: %s (%x)" % (version, version))
    return version, version_map[version]
def add_fields(table_name, field_specs):
    """
    opens table_name, adds the given field specs, and closes it again
    """
    tbl = Table(table_name)
    tbl.open()
    try:
        tbl.add_fields(field_specs)
    finally:
        tbl.close()
def delete_fields(table_name, field_names):
    """
    opens table_name, deletes the named fields, and closes it again
    """
    tbl = Table(table_name)
    tbl.open()
    try:
        tbl.delete_fields(field_names)
    finally:
        tbl.close()
def first_record(table_name):
    """
    opens table_name and prints its first record
    """
    tbl = Table(table_name)
    tbl.open()
    try:
        print(str(tbl[0]))
    finally:
        tbl.close()
def from_csv(csvfile, to_disk=False, filename=None, field_names=None, extra_fields=None,
        dbf_type='db3', memo_size=64, min_field_size=1,
        encoding=None, errors=None):
    """
    creates a Character table from a csv file
    to_disk will create a table with the same name
    filename will be used if provided
    field_names default to f0, f1, f2, etc, unless specified (list)
    extra_fields can be used to add additional fields -- should be normal field specifiers (list)
    """
    # NOTE(review): the csv file is always read as latin-1; presumably so any
    # byte sequence decodes without error -- confirm before changing
    with codecs.open(csvfile, 'r', encoding='latin-1', errors=errors) as fd:
        reader = csv.reader(fd)
        if field_names:
            if isinstance(field_names, basestring):
                field_names = field_names.split()
            if ' ' not in field_names[0]:
                # bare names -- turn each into a Memo field spec
                field_names = ['%s M' % fn for fn in field_names]
        else:
            field_names = ['f0 M']
        # build an in-memory staging table starting with a single field;
        # more fields are added below as wider rows are encountered
        mtable = Table(':memory:', [field_names[0]], dbf_type=dbf_type, memo_size=memo_size, codepage=encoding, on_disk=False)
        mtable.open()
        fields_so_far = 1
        #for row in reader:
        # `while reader:` is always true -- the loop exits via the
        # StopIteration break below
        while reader:
            try:
                row = next(reader)
            except UnicodeEncodeError:
                row = ['']
            except StopIteration:
                break
            while fields_so_far < len(row):
                # row is wider than the table so far -- grow the table
                if fields_so_far == len(field_names):
                    field_names.append('f%d M' % fields_so_far)
                mtable.add_fields(field_names[fields_so_far])
                fields_so_far += 1
            mtable.append(tuple(row))
    if filename:
        to_disk = True
    if not to_disk:
        if extra_fields:
            mtable.add_fields(extra_fields)
    else:
        if not filename:
            filename = os.path.splitext(csvfile)[0]
        # size each Character field to the widest value seen (at least
        # min_field_size); anything 255+ becomes a Memo field instead
        length = [min_field_size] * len(field_names)
        for record in mtable:
            for i in index(mtable.field_names):
                length[i] = max(length[i], len(record[i]))
        fields = mtable.field_names
        fielddef = []
        for i in index(length):
            if length[i] < 255:
                fielddef.append('%s C(%d)' % (fields[i], length[i]))
            else:
                fielddef.append('%s M' % (fields[i]))
        if extra_fields:
            fielddef.extend(extra_fields)
        csvtable = Table(filename, fielddef, dbf_type=dbf_type, codepage=encoding)
        csvtable.open()
        for record in mtable:
            csvtable.append(scatter(record))
        csvtable.close()
        return csvtable
    mtable.close()
    return mtable
def get_fields(table_name):
    """
    returns the list of field names of a table
    """
    return Table(table_name).field_names
def info(table_name):
    """
    prints table info
    """
    print(str(Table(table_name)))
def rename_field(table_name, oldfield, newfield):
    """
    renames a field in a table
    """
    table = Table(table_name)
    # open before modifying, matching add_fields()/delete_fields() above --
    # the original skipped this and operated on a closed table
    table.open()
    try:
        table.rename_field(oldfield, newfield)
    finally:
        table.close()
def structure(table_name, field=None):
    """
    returns the definition of a field (or all fields)
    """
    return Table(table_name).structure(field)
def hex_dump(records):
    """
    just what it says ;)  (prints each record's raw bytes as hex,
    skipping the leading delete-flag byte)
    """
    # note: loop variable renamed so it no longer shadows this module's
    # own index() function
    for row, record in enumerate(records):
        raw = record._data
        print("%2d: " % (row,))
        for byte in raw[1:]:
            print(" %2x " % (byte,))
        print()
# Foxpro functions

def gather(record, data, drop=False):
    """
    saves data into a record's fields; writes to disk if not in flux
    keys with no matching field will raise a FieldMissingError
    exception unless drop == True;
    if an Exception occurs the record is restored before reraising
    """
    if isinstance(record, Record) and record._meta.status == CLOSED:
        raise DbfError("%s is closed; cannot modify record" % record._meta.filename)
    # only start a flux cycle if the record is not already in one
    needs_flux = record._write_to_disk
    if needs_flux:
        record._start_flux()
    try:
        valid = field_names(record)
        for key in field_names(data):
            value = data[key]
            if key not in valid:
                if drop:
                    continue
                raise FieldMissingError(key)
            record[key] = value
    except:
        if needs_flux:
            record._rollback_flux()
        raise
    if needs_flux:
        record._commit_flux()
def scan(table, direction='forward', filter=lambda rec: True):
    """
    advances the record pointer until filter accepts a record; returns True
    on a match, False once Eof/Bof is reached
    table must be derived from _Navigation or have skip() method
    """
    if direction not in ('forward', 'reverse'):
        raise TypeError("direction should be 'forward' or 'reverse', not %r" % direction)
    if direction == 'forward':
        step, end_of_data = +1, Eof
    else:
        step, end_of_data = -1, Bof
    try:
        while True:
            table.skip(step)
            if filter(table.current_record):
                return True
    except end_of_data:
        return False
# collections.Mapping was an alias of collections.abc.Mapping that was removed
# in Python 3.10; on 3.10+ the old getattr(collections, 'Mapping', dict)
# silently degraded the check to plain dict.  Use the real ABC.
import collections.abc

def scatter(record, as_type=create_template, _mappings=collections.abc.Mapping):
    """
    returns as_type() of [fieldnames and] values.

    - a plain function is simply called with the record
    - a Mapping class is built from (fieldname, value) pairs
    - any other class is built from the bare values
    """
    if isinstance(as_type, types.FunctionType):
        return as_type(record)
    elif issubclass(as_type, _mappings):
        return as_type(zip(field_names(record), record))
    else:
        return as_type(record)