# utils.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php

import contextlib
from functools import wraps
import getpass
import logging
import os
import platform
import subprocess
import re
import shutil
import stat
import time
from unittest import SkipTest

from gitdb.util import (  # NOQA @IgnorePep8
    make_sha,
    LockedFD,                   # @UnusedImport
    file_contents_ro,           # @UnusedImport
    file_contents_ro_filepath,  # @UnusedImport
    LazyMixin,                  # @UnusedImport
    to_hex_sha,                 # @UnusedImport
    to_bin_sha,                 # @UnusedImport
    bin_to_hex,                 # @UnusedImport
    hex_to_bin,                 # @UnusedImport
)

from git.compat import is_win
import os.path as osp

from .compat import (
    MAXSIZE,
    defenc,
    PY3
)
from .exc import InvalidGitRepositoryError


# NOTE: Some of the unused imports might be used/imported by others.
# Handle once test-cases are back up and running.
# Most of these are unused here, but are for use by git-python modules so these
# don't see gitdb all the time. Flake of course doesn't like it.
__all__ = ("stream_copy", "join_path", "to_native_path_windows", "to_native_path_linux",
           "join_path_native", "Stats", "IndexFileSHA1Writer", "Iterable", "IterableList",
           "BlockingLockFile", "LockFile", 'Actor', 'get_user_id', 'assure_directory_exists',
           'RemoteProgress', 'CallableRemoteProgress', 'rmtree', 'unbare_repo',
           'HIDE_WINDOWS_KNOWN_ERRORS')

log = logging.getLogger(__name__)

#: We need an easy way to see if Appveyor TCs start failing,
#: so the errors marked with this var are considered "acknowledged" ones, awaiting remedy,
#: till then, we wish to hide them.
HIDE_WINDOWS_KNOWN_ERRORS = is_win and os.environ.get('HIDE_WINDOWS_KNOWN_ERRORS', True)
HIDE_WINDOWS_FREEZE_ERRORS = is_win and os.environ.get('HIDE_WINDOWS_FREEZE_ERRORS', True)
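# NOTE: the environment value, when set, is used as-is, so any non-empty string
# (even "0" or "false") is truthy here and keeps the flag enabled on Windows.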

#{ Utility Methods


def unbare_repo(func):
    """Methods with this decorator raise InvalidGitRepositoryError if they
    encounter a bare repository"""

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.repo.bare:
            raise InvalidGitRepositoryError("Method '%s' cannot operate on bare repositories" % func.__name__)
        # END bare method
        return func(self, *args, **kwargs)
    # END wrapper
    return wrapper
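# NOTE: ``unbare_repo`` assumes the decorated function is a method of an object
# exposing a ``repo`` attribute whose ``bare`` flag can be checked.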

@contextlib.contextmanager
def cwd(new_dir):
    old_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield new_dir
    finally:
        os.chdir(old_dir)

def rmtree(path):
    """Remove the given path recursively.

    :note: we use shutil rmtree but adjust its behaviour to see whether files that
        couldn't be deleted are read-only. Windows will not remove them in that case"""

    def onerror(func, path, exc_info):
        # Is the error an access error ?
        os.chmod(path, stat.S_IWUSR)

        try:
            func(path)  # Will scream if still not possible to delete.
        except Exception as ex:
            if HIDE_WINDOWS_KNOWN_ERRORS:
                raise SkipTest("FIXME: fails with: PermissionError\n  %s" % ex)
            else:
                raise

    return shutil.rmtree(path, False, onerror)

def rmfile(path):
    """Ensure file deleted also on *Windows* where read-only files need special treatment."""
    if osp.isfile(path):
        if is_win:
            os.chmod(path, 0o777)
        os.remove(path)

def stream_copy(source, destination, chunk_size=512 * 1024):
    """Copy all data from the source stream into the destination stream in chunks
    of size chunk_size

    :return: amount of bytes written"""
    br = 0
    while True:
        chunk = source.read(chunk_size)
        destination.write(chunk)
        br += len(chunk)
        if len(chunk) < chunk_size:
            break
    # END reading output stream
    return br

def join_path(a, *p):
    """Join path tokens together similar to osp.join, but always use
    '/' instead of possibly '\' on windows."""
    path = a
    for b in p:
        if len(b) == 0:
            continue
        if b.startswith('/'):
            path += b[1:]
        elif path == '' or path.endswith('/'):
            path += b
        else:
            path += '/' + b
    # END for each path token to add
    return path
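# e.g. join_path('foo', 'bar') == 'foo/bar' and join_path('foo/', '/bar') == 'foo/bar'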

if is_win:
    def to_native_path_windows(path):
        return path.replace('/', '\\')

    def to_native_path_linux(path):
        return path.replace('\\', '/')

    to_native_path = to_native_path_windows
else:
    # no need for any work on linux
    def to_native_path_linux(path):
        return path

    to_native_path = to_native_path_linux

def join_path_native(a, *p):
    """
    As join path, but makes sure an OS native path is returned. This is only
    needed to play it safe on my dear windows and to assure nice paths that only
    use '\'"""
    return to_native_path(join_path(a, *p))

def assure_directory_exists(path, is_file=False):
    """Assure that the directory pointed to by path exists.

    :param is_file: If True, path is assumed to be a file and handled correctly.
        Otherwise it must be a directory
    :return: True if the directory was created, False if it already existed"""
    if is_file:
        path = osp.dirname(path)
    # END handle file
    if not osp.isdir(path):
        os.makedirs(path)
        return True
    return False

def _get_exe_extensions():
    PATHEXT = os.environ.get('PATHEXT', None)
    return tuple(p.upper() for p in PATHEXT.split(os.pathsep)) \
        if PATHEXT \
        else (('.BAT', '.COM', '.EXE') if is_win else ())

def py_where(program, path=None):
    # From: http://stackoverflow.com/a/377028/548792
    winprog_exts = _get_exe_extensions()

    def is_exec(fpath):
        return osp.isfile(fpath) and os.access(fpath, os.X_OK) and (
            os.name != 'nt' or not winprog_exts or any(fpath.upper().endswith(ext)
                                                       for ext in winprog_exts))

    progs = []
    if not path:
        path = os.environ["PATH"]
    for folder in path.split(os.pathsep):
        folder = folder.strip('"')
        if folder:
            exe_path = osp.join(folder, program)
            for f in [exe_path] + ['%s%s' % (exe_path, e) for e in winprog_exts]:
                if is_exec(f):
                    progs.append(f)
    return progs

def _cygexpath(drive, path):
    if osp.isabs(path) and not drive:
        ## Invoked from `cygpath()` directly with `D:Apps\123`?
        #  It's an error; leave it alone and just convert the slashes.
        p = path
    else:
        p = path and osp.normpath(osp.expandvars(osp.expanduser(path)))
        if osp.isabs(p):
            if drive:
                # Confusing, maybe a remote system should expand vars.
                p = path
            else:
                p = cygpath(p)
        elif drive:
            p = '/cygdrive/%s/%s' % (drive.lower(), p)

    return p.replace('\\', '/')
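
#: Each entry below is a ``(regex, parser, recurse)`` triple: the regex recognises one
#: Windows path flavour, the parser converts the matched groups to a Cygwin-style path,
#: and ``recurse`` asks ``cygpath()`` to run again on the converted result.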
_cygpath_parsers = (
    ## See: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
    ## and: https://www.cygwin.com/cygwin-ug-net/using.html#unc-paths
    (re.compile(r"\\\\\?\\UNC\\([^\\]+)\\([^\\]+)(?:\\(.*))?"),
     (lambda server, share, rest_path: '//%s/%s/%s' % (server, share, rest_path.replace('\\', '/'))),
     False
     ),

    (re.compile(r"\\\\\?\\(\w):[/\\](.*)"),
     _cygexpath,
     False
     ),

    (re.compile(r"(\w):[/\\](.*)"),
     _cygexpath,
     False
     ),

    (re.compile(r"file:(.*)", re.I),
     (lambda rest_path: rest_path),
     True),

    (re.compile(r"(\w{2,}:.*)"),  # remote URL, do nothing
     (lambda url: url),
     False),
)

def cygpath(path):
    """Use :meth:`git.cmd.Git.polish_url()` instead, that works on any environment."""
    if not path.startswith(('/cygdrive', '//')):
        for regex, parser, recurse in _cygpath_parsers:
            match = regex.match(path)
            if match:
                path = parser(*match.groups())
                if recurse:
                    path = cygpath(path)
                break
        else:
            path = _cygexpath(None, path)

    return path

_decygpath_regex = re.compile(r"/cygdrive/(\w)(/.*)?")


def decygpath(path):
    m = _decygpath_regex.match(path)
    if m:
        drive, rest_path = m.groups()
        path = '%s:%s' % (drive.upper(), rest_path or '')

    return path.replace('/', '\\')
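# e.g. decygpath('/cygdrive/c/Users/foo') returns the native path 'C:\\Users\\foo'
# (shown here as a Python string literal).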

#: Store boolean flags denoting if a specific Git executable
#: is from a Cygwin installation (since `lru_cache()` is unsupported on PY2).
_is_cygwin_cache = {}


def is_cygwin_git(git_executable):
    if not is_win:
        return False

    #from subprocess import check_output

    is_cygwin = _is_cygwin_cache.get(git_executable)
    if is_cygwin is None:
        is_cygwin = False
        try:
            git_dir = osp.dirname(git_executable)
            if not git_dir:
                res = py_where(git_executable)
                git_dir = osp.dirname(res[0]) if res else None

            ## Just a name given, not a real path.
            uname_cmd = osp.join(git_dir, 'uname')
            process = subprocess.Popen([uname_cmd], stdout=subprocess.PIPE,
                                       universal_newlines=True)
            uname_out, _ = process.communicate()
            #retcode = process.poll()
            is_cygwin = 'CYGWIN' in uname_out
        except Exception as ex:
            log.debug('Failed checking if running in CYGWIN due to: %r', ex)
        _is_cygwin_cache[git_executable] = is_cygwin

    return is_cygwin

def get_user_id():
    """:return: string identifying the currently active system user as name@node"""
    return "%s@%s" % (getpass.getuser(), platform.node())

def finalize_process(proc, **kwargs):
    """Wait for the process (clone, fetch, pull or push) and handle its errors accordingly"""
    ## TODO: No close proc-streams??
    proc.wait(**kwargs)

def expand_path(p, expand_vars=True):
    try:
        p = osp.expanduser(p)
        if expand_vars:
            p = osp.expandvars(p)
        return osp.normpath(osp.abspath(p))
    except Exception:
        return None

#} END utilities

#{ Classes


class RemoteProgress(object):
    """
    Handler providing an interface to parse progress information emitted by git-push
    and git-fetch and to dispatch callbacks allowing subclasses to react to the progress.
    """
    _num_op_codes = 9
    BEGIN, END, COUNTING, COMPRESSING, WRITING, RECEIVING, RESOLVING, FINDING_SOURCES, CHECKING_OUT = \
        [1 << x for x in range(_num_op_codes)]
    STAGE_MASK = BEGIN | END
    OP_MASK = ~STAGE_MASK
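    # BEGIN and END occupy the two lowest bits and act as stage flags; all the
    # higher bits (everything covered by OP_MASK) identify the operation itself,
    # one bit per operation.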

    DONE_TOKEN = 'done.'
    TOKEN_SEPARATOR = ', '

    __slots__ = ('_cur_line',
                 '_seen_ops',
                 'error_lines',  # Lines that started with 'error:' or 'fatal:'.
                 'other_lines')  # Lines not denoting progress (e.g. push-infos).
    re_op_absolute = re.compile(r"(remote: )?([\w\s]+):\s+()(\d+)()(.*)")
    re_op_relative = re.compile(r"(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)")
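    # The "absolute" pattern matches lines like "Counting objects: 4, done.",
    # the "relative" one lines like "Receiving objects: 45% (9/20)".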

    def __init__(self):
        self._seen_ops = []
        self._cur_line = None
        self.error_lines = []
        self.other_lines = []

    def _parse_progress_line(self, line):
        """Parse progress information from the given line as retrieved by git-push
        or git-fetch.

        - Lines that do not contain progress info are stored in :attr:`other_lines`.
        - Lines that seem to contain an error (i.e. start with error: or fatal:) are stored
          in :attr:`error_lines`.

        :return: list(line, ...) list of lines that could not be processed"""
        # handle
        # Counting objects: 4, done.
        # Compressing objects: 50% (1/2) \rCompressing objects: 100% (2/2) \rCompressing objects: 100% (2/2), done.
        self._cur_line = line = line.decode('utf-8') if isinstance(line, bytes) else line
        if len(self.error_lines) > 0 or self._cur_line.startswith(('error:', 'fatal:')):
            self.error_lines.append(self._cur_line)
            return []

        sub_lines = line.split('\r')
        failed_lines = []
        for sline in sub_lines:
            # find escape characters and cut them away - regex will not work with
            # them as they are non-ascii. As git might expect a tty, it will send them
            last_valid_index = None
            for i, c in enumerate(reversed(sline)):
                if ord(c) < 32:
                    # it's a slice index
                    last_valid_index = -i - 1
                # END character was non-ascii
            # END for each character in sline
            if last_valid_index is not None:
                sline = sline[:last_valid_index]
            # END cut away invalid part
            sline = sline.rstrip()

            cur_count, max_count = None, None
            match = self.re_op_relative.match(sline)
            if match is None:
                match = self.re_op_absolute.match(sline)

            if not match:
                self.line_dropped(sline)
                failed_lines.append(sline)
                continue
            # END could not get match

            op_code = 0
            remote, op_name, percent, cur_count, max_count, message = match.groups()  # @UnusedVariable

            # get operation id
            if op_name == "Counting objects":
                op_code |= self.COUNTING
            elif op_name == "Compressing objects":
                op_code |= self.COMPRESSING
            elif op_name == "Writing objects":
                op_code |= self.WRITING
            elif op_name == 'Receiving objects':
                op_code |= self.RECEIVING
            elif op_name == 'Resolving deltas':
                op_code |= self.RESOLVING
            elif op_name == 'Finding sources':
                op_code |= self.FINDING_SOURCES
            elif op_name == 'Checking out files':
                op_code |= self.CHECKING_OUT
            else:
                # Note: On windows it can happen that partial lines are sent
                # Hence we get something like "CompreReceiving objects", which is
                # a blend of "Compressing objects" and "Receiving objects".
                # This can't really be prevented, so we drop the line verbosely
                # to make sure we get informed in case the process spits out new
                # commands at some point.
                self.line_dropped(sline)
                # Note: Don't add this line to the failed lines, as we have to silently
                # drop it
                self.other_lines.extend(failed_lines)
                return failed_lines
            # END handle op code

            # figure out stage
            if op_code not in self._seen_ops:
                self._seen_ops.append(op_code)
                op_code |= self.BEGIN
            # END begin opcode

            if message is None:
                message = ''
            # END message handling

            message = message.strip()
            if message.endswith(self.DONE_TOKEN):
                op_code |= self.END
                message = message[:-len(self.DONE_TOKEN)]
            # END end message handling
            message = message.strip(self.TOKEN_SEPARATOR)

            self.update(op_code,
                        cur_count and float(cur_count),
                        max_count and float(max_count),
                        message)
        # END for each sub line
        self.other_lines.extend(failed_lines)
        return failed_lines

    def new_message_handler(self):
        """
        :return:
            a progress handler suitable for handle_process_output(), passing lines on to this Progress
            handler in a suitable format"""
        def handler(line):
            return self._parse_progress_line(line.rstrip())
        # end
        return handler

    def line_dropped(self, line):
        """Called whenever a line could not be understood and was therefore dropped."""
        pass

    def update(self, op_code, cur_count, max_count=None, message=''):
        """Called whenever the progress changes

        :param op_code:
            Integer allowing to be compared against Operation IDs and stage IDs.

            Stage IDs are BEGIN and END. BEGIN will only be set once for each Operation
            ID as well as END. It may be that BEGIN and END are set at once in case only
            one progress message was emitted due to the speed of the operation.
            Between BEGIN and END, none of these flags will be set

            Operation IDs are all held within the OP_MASK. Only one Operation ID will
            be active per call.
        :param cur_count: Current absolute count of items
        :param max_count:
            The maximum count of items we expect. It may be None in case there is
            no maximum number of items or if it is (yet) unknown.
        :param message:
            In case of the 'WRITING' operation, it contains the amount of bytes
            transferred. It may possibly be used for other purposes as well.

        You may read the contents of the current line in self._cur_line"""
        pass

class CallableRemoteProgress(RemoteProgress):
    """An implementation forwarding updates to any callable"""
    __slots__ = ('_callable')

    def __init__(self, fn):
        self._callable = fn
        super(CallableRemoteProgress, self).__init__()

    def update(self, *args, **kwargs):
        self._callable(*args, **kwargs)
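# Illustrative use only: an instance can be handed to operations that accept a
# ``progress`` argument, e.g. ``remote.fetch(progress=CallableRemoteProgress(print))``.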

class Actor(object):
    """Actors hold information about a person acting on the repository. They
    can be committers and authors or anything with a name and an email as
    mentioned in the git log entries."""
    # PRECOMPILED REGEX
    name_only_regex = re.compile(r'<(.*)>')
    name_email_regex = re.compile(r'(.*) <(.*?)>')

    # ENVIRONMENT VARIABLES
    # read when creating new commits
    env_author_name = "GIT_AUTHOR_NAME"
    env_author_email = "GIT_AUTHOR_EMAIL"
    env_committer_name = "GIT_COMMITTER_NAME"
    env_committer_email = "GIT_COMMITTER_EMAIL"

    # CONFIGURATION KEYS
    conf_name = 'name'
    conf_email = 'email'

    __slots__ = ('name', 'email')

    def __init__(self, name, email):
        self.name = name
        self.email = email

    def __eq__(self, other):
        return self.name == other.name and self.email == other.email

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash((self.name, self.email))

    def __str__(self):
        return self.name

    def __repr__(self):
        return u'<git.Actor "%s <%s>">' % (self.name, self.email)

    @classmethod
    def _from_string(cls, string):
        """Create an Actor from a string.

        :param string: is the string, which is expected to be in regular git format

                John Doe <jdoe@example.com>

        :return: Actor"""
        m = cls.name_email_regex.search(string)
        if m:
            name, email = m.groups()
            return Actor(name, email)
        else:
            m = cls.name_only_regex.search(string)
            if m:
                return Actor(m.group(1), None)
            else:
                # assume best and use the whole string as name
                return Actor(string, None)
            # END special case name
        # END handle name/email matching

    @classmethod
    def _main_actor(cls, env_name, env_email, config_reader=None):
        actor = Actor('', '')
        default_email = get_user_id()
        default_name = default_email.split('@')[0]

        for attr, evar, cvar, default in (('name', env_name, cls.conf_name, default_name),
                                          ('email', env_email, cls.conf_email, default_email)):
            try:
                val = os.environ[evar]
                if not PY3:
                    val = val.decode(defenc)
                # end assure we don't get 'invalid strings'
                setattr(actor, attr, val)
            except KeyError:
                if config_reader is not None:
                    setattr(actor, attr, config_reader.get_value('user', cvar, default))
                # END config-reader handling
                if not getattr(actor, attr):
                    setattr(actor, attr, default)
            # END handle name
        # END for each item to retrieve
        return actor

    @classmethod
    def committer(cls, config_reader=None):
        """
        :return: Actor instance corresponding to the configured committer. It behaves
            similar to the git implementation, such that the environment will override
            configuration values of config_reader. If no value is set at all, it will be
            generated
        :param config_reader: ConfigReader to use to retrieve the values from in case
            they are not set in the environment"""
        return cls._main_actor(cls.env_committer_name, cls.env_committer_email, config_reader)

    @classmethod
    def author(cls, config_reader=None):
        """Same as committer(), but defines the main author. It may be specified in the environment,
        but defaults to the committer"""
        return cls._main_actor(cls.env_author_name, cls.env_author_email, config_reader)

class Stats(object):
    """
    Represents stat information as presented by git at the end of a merge. It is
    created from the output of a diff operation.

    ``Example``::

        c = Commit( sha1 )
        s = c.stats
        s.total         # full-stat-dict
        s.files         # dict( filepath : stat-dict )

    ``stat-dict``

    A dictionary with the following keys and values::

        deletions = number of deleted lines as int
        insertions = number of inserted lines as int
        lines = total number of lines changed as int, or deletions + insertions

    ``full-stat-dict``

    In addition to the items in the stat-dict, it features additional information::

        files = number of changed files as int"""
    __slots__ = ("total", "files")

    def __init__(self, total, files):
        self.total = total
        self.files = files

    @classmethod
    def _list_from_string(cls, repo, text):
        """Create a Stat object from output retrieved by git-diff.

        :return: git.Stat"""
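        # Each input line is expected in `git diff --numstat` form:
        # "<insertions>\t<deletions>\t<file path>", with '-' marking binary files.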
        hsh = {'total': {'insertions': 0, 'deletions': 0, 'lines': 0, 'files': 0}, 'files': {}}
        for line in text.splitlines():
            (raw_insertions, raw_deletions, filename) = line.split("\t")
            insertions = raw_insertions != '-' and int(raw_insertions) or 0
            deletions = raw_deletions != '-' and int(raw_deletions) or 0
            hsh['total']['insertions'] += insertions
            hsh['total']['deletions'] += deletions
            hsh['total']['lines'] += insertions + deletions
            hsh['total']['files'] += 1
            hsh['files'][filename.strip()] = {'insertions': insertions,
                                              'deletions': deletions,
                                              'lines': insertions + deletions}
        return Stats(hsh['total'], hsh['files'])

class IndexFileSHA1Writer(object):
    """Wrapper around a file-like object that remembers the SHA1 of
    the data written to it. It will write a sha when the stream is closed
    or when asked for explicitly using write_sha.

    Only useful to the indexfile

    :note: Based on the dulwich project"""
    __slots__ = ("f", "sha1")

    def __init__(self, f):
        self.f = f
        self.sha1 = make_sha(b"")

    def write(self, data):
        self.sha1.update(data)
        return self.f.write(data)

    def write_sha(self):
        sha = self.sha1.digest()
        self.f.write(sha)
        return sha

    def close(self):
        sha = self.write_sha()
        self.f.close()
        return sha

    def tell(self):
        return self.f.tell()

class LockFile(object):
    """Provides methods to obtain, check for, and release a file based lock which
    should be used to handle concurrent access to the same file.

    As we are a utility class to be derived from, we only use protected methods.

    Locks will automatically be released on destruction"""
    __slots__ = ("_file_path", "_owns_lock")

    def __init__(self, file_path):
        self._file_path = file_path
        self._owns_lock = False

    def __del__(self):
        self._release_lock()

    def _lock_file_path(self):
        """:return: Path to lockfile"""
        return "%s.lock" % (self._file_path)

    def _has_lock(self):
        """:return: True if we have a lock and if the lockfile still exists
        :raise AssertionError: if our lock-file does not exist"""
        return self._owns_lock

    def _obtain_lock_or_raise(self):
        """Create a lock file as flag for other instances, mark our instance as lock-holder

        :raise IOError: if a lock was already present or a lock file could not be written"""
        if self._has_lock():
            return
        lock_file = self._lock_file_path()
        if osp.isfile(lock_file):
            raise IOError("Lock for file %r did already exist, delete %r in case the lock is illegal" %
                          (self._file_path, lock_file))

        try:
            flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
            if is_win:
                flags |= os.O_SHORT_LIVED
            fd = os.open(lock_file, flags, 0)
            os.close(fd)
        except OSError as e:
            raise IOError(str(e))

        self._owns_lock = True

    def _obtain_lock(self):
        """The default implementation will raise if a lock cannot be obtained.
        Subclasses may override this method to provide a different implementation"""
        return self._obtain_lock_or_raise()

    def _release_lock(self):
        """Release our lock if we have one"""
        if not self._has_lock():
            return

        # if someone removed our file beforehand, let's just flag this issue
        # instead of failing, to make it more usable.
        lfp = self._lock_file_path()
        try:
            rmfile(lfp)
        except OSError:
            pass
        self._owns_lock = False

class BlockingLockFile(LockFile):
    """The lock file will block until a lock could be obtained, or fail after
    a specified timeout.

    :note: If the directory containing the lock was removed, an exception will
        be raised during the blocking period, preventing hangs as the lock
        can never be obtained."""
    __slots__ = ("_check_interval", "_max_block_time")

    def __init__(self, file_path, check_interval_s=0.3, max_block_time_s=MAXSIZE):
        """Configure the instance

        :param check_interval_s:
            Period of time to sleep until the lock is checked the next time.
            By default, it waits a nearly unlimited time
        :param max_block_time_s: Maximum amount of seconds we may lock"""
        super(BlockingLockFile, self).__init__(file_path)
        self._check_interval = check_interval_s
        self._max_block_time = max_block_time_s

    def _obtain_lock(self):
        """This method blocks until it obtains the lock, or raises IOError if
        it ran out of time or if the parent directory was not available anymore.
        If this method returns, you are guaranteed to own the lock"""
        starttime = time.time()
        maxtime = starttime + float(self._max_block_time)
        while True:
            try:
                super(BlockingLockFile, self)._obtain_lock()
            except IOError:
                # sanity check: if the directory leading to the lockfile is not
                # readable anymore, raise an exception
                curtime = time.time()
                if not osp.isdir(osp.dirname(self._lock_file_path())):
                    msg = "Directory containing the lockfile %r was not readable anymore after waiting %g seconds" % (
                        self._lock_file_path(), curtime - starttime)
                    raise IOError(msg)
                # END handle missing directory

                if curtime >= maxtime:
                    msg = "Waited %g seconds for lock at %r" % (maxtime - starttime, self._lock_file_path())
                    raise IOError(msg)
                # END abort if we wait too long
                time.sleep(self._check_interval)
            else:
                break
        # END endless loop

class IterableList(list):
    """
    List of iterable objects allowing to query an object by id or by named index::

        heads = repo.heads
        heads.master
        heads['master']
        heads[0]

    It requires an id_attribute name to be set which will be queried from its
    contained items to have a means for comparison.

    A prefix can be specified which is to be used in case the id returned by the
    items always contains a prefix that does not matter to the user, so it
    can be left out."""
    __slots__ = ('_id_attr', '_prefix')

    def __new__(cls, id_attr, prefix=''):
        return super(IterableList, cls).__new__(cls)

    def __init__(self, id_attr, prefix=''):
        self._id_attr = id_attr
        self._prefix = prefix

    def __contains__(self, attr):
        # first try identity match for performance
        try:
            rval = list.__contains__(self, attr)
            if rval:
                return rval
        except (AttributeError, TypeError):
            pass
        # END handle match

        # otherwise make a full name search
        try:
            getattr(self, attr)
            return True
        except (AttributeError, TypeError):
            return False
        # END handle membership

    def __getattr__(self, attr):
        attr = self._prefix + attr
        for item in self:
            if getattr(item, self._id_attr) == attr:
                return item
        # END for each item
        return list.__getattribute__(self, attr)

    def __getitem__(self, index):
        if isinstance(index, int):
            return list.__getitem__(self, index)

        try:
            return getattr(self, index)
        except AttributeError:
            raise IndexError("No item found with id %r" % (self._prefix + index))
        # END handle getattr

    def __delitem__(self, index):
        delindex = index
        if not isinstance(index, int):
            delindex = -1
            name = self._prefix + index
            for i, item in enumerate(self):
                if getattr(item, self._id_attr) == name:
                    delindex = i
                    break
                # END search index
            # END for each item
            if delindex == -1:
                raise IndexError("Item with name %s not found" % name)
            # END handle error
        # END get index to delete
        list.__delitem__(self, delindex)

class Iterable(object):
    """Defines an interface for iterable items which is to assure a uniform
    way to retrieve and iterate items within the git repository"""
    __slots__ = ()
    _id_attribute_ = "attribute that most suitably identifies your instance"

    @classmethod
    def list_items(cls, repo, *args, **kwargs):
        """
        Find all items of this type - subclasses can specify args and kwargs differently.
        If no args are given, subclasses are obliged to return all items.

        :note: Favor the iter_items method as it will avoid eagerly building the
            complete list of items.

        :return: list(Item,...) list of item instances"""
        out_list = IterableList(cls._id_attribute_)
        out_list.extend(cls.iter_items(repo, *args, **kwargs))
        return out_list

    @classmethod
    def iter_items(cls, repo, *args, **kwargs):
        """For more information about the arguments, see list_items

        :return: iterator yielding Items"""
        raise NotImplementedError("To be implemented by Subclass")

#} END classes

class NullHandler(logging.Handler):
    def emit(self, record):
        pass


# In Python 2.6, there is no NullHandler yet. Let's monkey-patch it for a workaround.
if not hasattr(logging, 'NullHandler'):
    logging.NullHandler = NullHandler