rem
stringlengths
0
322k
add
stringlengths
0
2.05M
context
stringlengths
8
228k
url += urllib.quote_plus('?next=/%s' % request.META['HTTP_REFERER'].split('/', 3)[-1])
referer = request.META.get('HTTP_REFERER') if referer: url += urllib.quote_plus('?next=/%s' % referer.split('/', 3)[-1])
def usage(request, slug, action):
    """Add or remove the current user from a package's "usage" list.

    ``action`` is 'add' or 'remove' (case-insensitive).  Anonymous users
    are bounced to the login page; AJAX callers receive a JSON payload
    instead of a redirect.

    Bug fix: the login-redirect branch read request.META['HTTP_REFERER']
    directly, raising KeyError whenever no Referer header was sent
    (direct visits, privacy-stripping proxies).  The referer is now
    optional.
    """
    success = False
    # Check if the user is authenticated, redirecting them to the login page if
    # they're not.
    if not request.user.is_authenticated():
        url = settings.LOGIN_URL + '?next=%s' % reverse('usage', args=(slug, action))
        referer = request.META.get('HTTP_REFERER')
        if referer:
            url += urllib.quote_plus('?next=/%s' % referer.split('/', 3)[-1])
        if request.is_ajax():
            response = {}
            response['success'] = success
            response['redirect'] = url
            return HttpResponse(simplejson.dumps(response))
        return HttpResponseRedirect(url)

    package = get_object_or_404(Package, slug=slug)

    # Update the current user's usage of the given package as specified by the
    # request.
    if package.usage.filter(username=request.user.username):
        if action.lower() == 'remove':
            package.usage.remove(request.user)
            success = True
            template_name = 'package/add_usage_button.html'
            change = -1
    else:
        if action.lower() == 'add':
            package.usage.add(request.user)
            success = True
            template_name = 'package/remove_usage_button.html'
            change = 1

    # Invalidate the cache of this users's used_packages_list.
    if success:
        cache_key = "sitewide:used_packages_list:%s" % request.user.pk
        cache.delete(cache_key)

    # Return an ajax-appropriate response if necessary
    if request.is_ajax():
        response = {'success': success}
        if success:
            response['change'] = change
            response['body'] = render_to_string(
                template_name,
                {"package": package},
            )
        return HttpResponse(simplejson.dumps(response))

    # Intelligently determine the URL to redirect the user to based on the
    # available information.
    next = request.GET.get('next') or request.META.get("HTTP_REFERER") or reverse("package", kwargs={"slug": package.slug})
    return HttpResponseRedirect(next)
<li><strong>Utilities</strong> are apps, frameworks or sites but help Django.</li>
<li><strong>Utilities</strong> are not apps, frameworks or sites but still help Django in some way.</li>
def __unicode__(self): if not self.is_supported: return '%s (unsupported)' % self.title return self.title
self.save()
def fetch_metadata(self, *args, **kwargs): # Get the downloads from pypi if self.pypi_url.strip() and self.pypi_url != "http://pypi.python.org/pypi/": total_downloads = 0 for release in fetch_releases(self.pypi_name()): version, created = Version.objects.get_or_create( package = self, number = release.version )
django_dash = 'django-%s' % letter django_space = 'django %s' % letter packages = Package.objects.filter( Q(title__istartswith=letter) | Q(title__istartswith=django_dash) | Q(title__istartswith=django_space))
letter = letter.lower() if letter == 'd': packages = Package.objects.filter(title__istartswith=letter).exclude(title__istartswith='django-').exclude(title__istartswith='django ') else: django_dash = 'django-%s' % letter django_space = 'django %s' % letter packages = Package.objects.filter( Q(title__istartswith=letter) | Q(title__istartswith=django_dash) | Q(title__istartswith=django_space))
def handle(self, *args, **options):
for package in Package.objects.filter(title__startswith=letter):
for package in packages:
def handle(self, *args, **options):
print >> stdout, "%s packages updated" % index +1
print >> stdout, "%s packages updated" % int(index) + 1
def handle(self, *args, **options): print >> stdout, "Commencing package updating now" github = Github()
match = downloads_re.search(page).group()
match = downloads_re.search(page)
def save(self, *args, **kwargs):
    """Scrape PyPI download/version stats and Github repo stats into the
    model's fields before persisting.

    NOTE(review): performs blocking HTTP requests inside save(), and the
    regex ``.search(...).group()`` calls raise AttributeError when a
    pattern does not match (search returns None) -- confirm callers
    guard for this.
    """
    # Get the downloads from pypi
    if self.pypi_url:
        page = urlopen(self.pypi_url).read()
        # If the target page is an Index of packages
        if 'Index of Packages' in page:
            # Resolve the real project page from the index listing.
            if self.pypi_url.endswith('/'):
                project_name = self.pypi_url[:-1]
            else:
                project_name = self.pypi_url
            project_name = os.path.split(project_name)[1]
            logging.debug(project_name)
            page_re = re.compile(r'<a href="/pypi/%s/([a-zA-Z0-9\.\-\_]{1,})">' % project_name)
            match = page_re.search(page).group()
            if match:
                url = match.replace('<a href="', 'http://pypi.python.org')
                url = url.replace('">', '')
                page = urlopen(url).read()
            else:
                raise NoPyPiVersionFound('self.pypi_url')
        # We have a working page so grab the package info
        match = downloads_re.search(page).group()
        if match:
            # Strip the table-cell markup captured by the regex.
            self.pypi_downloads = match.replace('<td style="text-align: right;">', '')
            self.pypi_downloads = self.pypi_downloads.replace('</td>', '')
            self.pypi_downloads = int(self.pypi_downloads)
        else:
            self.pypi_downloads = 0
        # get the version off of Pypi doap
        match = doap_re.search(page).group()
        if match:
            url = 'http://pypi.python.org%s' % match
            doap = urlopen(url).read()
            match = version_re.search(doap).group()
            self.pypi_version = match.replace('<revision>','').replace('</revision>','')
    # Get the repo watchers number
    # TODO - make this abstracted so we can plug in other repos
    if self.repo.is_supported and 'Github' in self.repo.title and self.repo_url:
        github = Github()
        repo_name = self.repo_name()
        repo = github.repos.show(repo_name)
        self.repo_watchers = repo.watchers
        self.repo_forks = repo.forks
        self.repo_description = repo.description
    # TODO find out why repo commits limit on github to the first 35
    #self.repo_commits = len(github.commits.list(repo_name, "master"))
logging.error('No serial port is available.') logging.error('Are the game buttons connected?')
print 'No serial port is available.' print 'Are the game buttons connected?'
def __init__(self): """ Setup the serial system. """ super(GameButtons, self).__init__()
self.daily_double_overlay(self.get_width(), self.get_height())
self.daily_double_overlay.set_size(self.get_width(), self.get_height())
def on_allocation_changed(self, stage, box, flags):
    """Keep every overlay and board child sized to match this actor.

    Bug fix: ``self.daily_double_overlay(...)`` attempted to *call* the
    overlay object (TypeError at runtime); it must invoke its
    ``set_size`` method like the sibling overlays.
    """
    logging.debug("self size %d x %d and stage size %d x %d" % (self.get_width(), self.get_height(), stage.get_width(), stage.get_height()))
    self.clue_overlay.set_size(self.get_width(), self.get_height())
    self.daily_double_overlay.set_size(self.get_width(), self.get_height())
    self.game_board.set_size(self.get_width() * 0.9, self.get_height())
    self.board_box.set_size(self.get_width(), self.get_height())
    self.category_overlay.set_size(self.get_width(), self.get_height())
    self.player_buzz_overlay.set_size(self.get_width(), self.get_height())
logging.debug('called set text.')
def set_text(self, font, text):
    """Replace the current Text child with a new one rendered in *font*
    with content *text*, preserving this actor's size and scale.
    """
    logging.debug('called set text.')
    self.remove(self.text)
    self.text = Text(font, text)
    self.text.set_size(self.get_width(), self.get_height())
    scale_x, scale_y = self.get_scale()
    # (removed stray development leftover `print scale_x` that spammed stdout)
    self.text.set_scale(scale_x, scale_y)
    self.add(self.text)
logging.debug("Entered clue square on_paint.")
def on_paint(self, actor):
    """Render the clue square according to the clue's state machine:
    'unanswered' shows the dollar value, 'selected' shows the answer
    text, 'question' shows the question text, 'answered' blanks the
    square.
    """
    logging.debug("Entered clue square on_paint.")
    if self.clue.state == 'unanswered':
        self.set_text(config.clue_value_font, self.clue.get_value())
        self.set_color(config.clue_value_color)
    elif self.clue.state == 'selected':
        self.set_text(config.clue_font, self.clue.answer)
    elif self.clue.state == 'question':
        self.set_text(config.clue_font, self.clue.question)
    elif self.clue.state == 'answered':
        self.set_text(config.clue_font, '')
absolute_path = path
absolute_path = location
def _find_config_file(self):
    """Return the last existing path from CONFIG_LOCATIONS, or None.

    Bug fix: the loop assigned the undefined name ``path`` (raising
    NameError as soon as any config file was found); it must record the
    loop variable ``location``.
    """
    absolute_path = None
    for location in CONFIG_LOCATIONS:
        if os.path.isfile(location):
            absolute_path = location
    return absolute_path
self.set_text('')
self.set_text('', '')
def on_click(self, actor, event):
    """Advance the clue from 'unanswered' to 'selected' on click and
    blank the square's text.

    Bug fix: ``set_text`` takes (font, text); the old single-argument
    call ``self.set_text('')`` raised TypeError.
    """
    logging.debug("Clue square click! state = %s" % self.clue.state)
    if self.clue.state == 'unanswered':
        self.clue.state = 'selected'
        logging.debug("Clue answer = %s" % self.clue.get_answer())
        self.set_text('', '')
self.game_board = GameBoard() self.game_board.set_scale(0.6, 0.6) self.add(self.game_board)
def __init__(self): super(IdleBox, self).__init__(clutter.FlowLayout(clutter.FLOW_VERTICAL))
self.game_board.set_size(width, height) self.game_board.set_scale(0.6, 0.6)
def set_size(self, width, height):
    """Resize the box, then refit the embedded game board at 60% scale."""
    super(IdleBox, self).set_size(width, height)
    board = self.game_board
    board.set_size(width, height)
    board.set_scale(0.6, 0.6)
self.add(self.text) class DailyDoubleOverlay(clutter.Box): """ """ def __init__(self): """ """ super(DailyDoubleOverlay, self).__init__(clutter.BinLayout( clutter.BIN_ALIGNMENT_CENTER, clutter.BIN_ALIGNMENT_CENTER)) self.set_color(config.square_background_color) self.text = Text(config.player_overlay_font, 'Daily\nDouble')
def set_text(self, text): """ """ self.remove(self.text) self.text = Text(config.player_overlay_font, text) self.add(self.text)
self.board_box.add(self.game_board)
board_box_layout.pack(self.game_board, True, True, True, clutter.BOX_ALIGNMENT_CENTER, clutter.BOX_ALIGNMENT_CENTER)
def __init__(self): super(GUI, self).__init__()
board_box_layout = self.board_box.get_layout_manager()
self.player_score_box.set_width(0.1 * self.get_width())
def __init__(self): super(GUI, self).__init__()
self.game_board.set_size(self.get_width() * 0.9, self.get_height())
self.board_box.set_size(self.get_width(), self.get_height()) if config.display_player_scores: self.player_score_box.set_size(0.1 * self.get_width(), self.get_height()) self.game_board.set_size(0.9 * self.get_width(), self.get_height()) else: self.game_board.set_size(self.get_width, self.get_height())
def on_allocation_changed(self, stage, box, flags):
    """Resize every child actor/overlay to track this actor's allocation.

    Cleanup: ``daily_double_overlay.set_size`` was invoked twice per
    allocation change; the redundant trailing call was dropped
    (set_size with identical arguments is a no-op the second time).
    """
    logging.debug("self size %d x %d and stage size %d x %d" % (self.get_width(), self.get_height(), stage.get_width(), stage.get_height()))
    self.clue_overlay.set_size(self.get_width(), self.get_height())
    self.daily_double_overlay.set_size(self.get_width(), self.get_height())
    self.game_board.set_size(self.get_width() * 0.9, self.get_height())
    self.board_box.set_size(self.get_width(), self.get_height())
    self.category_overlay.set_size(self.get_width(), self.get_height())
    self.player_buzz_overlay.set_size(self.get_width(), self.get_height())
self.remove(self.game_board)
self.board_box.remove(self.game_board)
def update(self):
    """ Update the GUI based on the current state of the game. """
    # Rebuild the board widget from scratch when the model reports a
    # layout change, keeping it below the overlays.
    if game.check_update_game_board():
        self.remove(self.game_board)
        self.game_board = GameBoard()
        self.game_board.set_size(self.get_width(), self.get_height())
        self.add(self.game_board)
        self.game_board.lower_bottom()
    # Timeout buzzer: play the sound via a throwaway video texture.
    if game.check_timeout_beep():
        logging.debug("****************** BZZZZZT! ******************")
        tex = cluttergst.VideoTexture()
        tex.set_filename(config.sound_timeout)
        tex.set_playing(True)
    # Flash the buzzing player's name, fading to transparent over 1s.
    if game.check_flash_player_name():
        player_name = game.players[game.buzzed_player].name
        self.player_buzz_overlay.set_opacity(255)
        self.player_buzz_overlay.set_text(player_name)
        self.player_buzz_overlay.animate(clutter.EASE_IN_CUBIC, 1000, 'opacity', 0)
    # Same flash treatment for "name + current score".
    if game.check_flash_player_score():
        player = game.players[game.buzzed_player]
        text = '%s\n$%d' % (player.name, player.score)
        self.player_buzz_overlay.set_opacity(255)
        self.player_buzz_overlay.set_text(text)
        self.player_buzz_overlay.animate(clutter.EASE_IN_CUBIC, 1000, 'opacity', 0)
    # Daily double: show the overlay and play its jingle...
    if game.check_flash_daily_double():
        self.daily_double_overlay.set_opacity(255)
        tex = cluttergst.VideoTexture()
        tex.set_filename(config.sound_daily_double)
        tex.set_playing(True)
    # ...then fade it out once the model clears the flag.
    if game.check_clear_daily_double():
        self.daily_double_overlay.animate(clutter.EASE_IN_CUBIC, 1000, 'opacity', 0)
self.game_board.set_size(self.get_width(), self.get_height()) self.add(self.game_board)
self.board_box.add(self.game_board) if config.display_player_scores: self.game_board.set_size(0.9 * self.get_width(), self.get_height()) else: self.game_board.set_size(self.get_width, self.get_height())
def update(self): """ Update the GUI based on the current state of the game. """ if game.check_update_game_board(): self.remove(self.game_board) self.game_board = GameBoard() self.game_board.set_size(self.get_width(), self.get_height()) self.add(self.game_board) self.game_board.lower_bottom() if game.check_timeout_beep(): logging.debug("****************** BZZZZZT! ******************") tex = cluttergst.VideoTexture() tex.set_filename(config.sound_timeout) tex.set_playing(True) if game.check_flash_player_name(): player_name = game.players[game.buzzed_player].name self.player_buzz_overlay.set_opacity(255) self.player_buzz_overlay.set_text(player_name) self.player_buzz_overlay.animate(clutter.EASE_IN_CUBIC, 1000, 'opacity', 0) if game.check_flash_player_score(): player = game.players[game.buzzed_player] text = '%s\n$%d' % (player.name, player.score) self.player_buzz_overlay.set_opacity(255) self.player_buzz_overlay.set_text(text) self.player_buzz_overlay.animate(clutter.EASE_IN_CUBIC, 1000, 'opacity', 0) if game.check_flash_daily_double(): self.daily_double_overlay.set_opacity(255) tex = cluttergst.VideoTexture() tex.set_filename(config.sound_daily_double) tex.set_playing(True) if game.check_clear_daily_double(): self.daily_double_overlay.animate(clutter.EASE_IN_CUBIC, 1000, 'opacity', 0)
self.model = model layout = self.get_layout_manager() layout.set_vertical(False) spacing = int(self.get_width() * 0.01) layout.set_spacing(spacing) self.categories = [] category_number = 0 round = self.model.get_round() for category in round: category = Category(model, category, category_number) self.add(category) category.set_size(self.get_width() / len(round) - spacing, self.get_height()) self.categories.append(category) category_number += 1 def set_click_handler(self, click_handler):
def __init__(self):
def __init__(self, model): super(GameBoard, self).__init__(clutter.BoxLayout())
for category in self.categories: category.set_click_handler(click_handler)
super(Model, self).__init__()
def set_click_handler(self, click_handler):
    """Propagate *click_handler* to every category on the board."""
    for cat in self.categories:
        cat.set_click_handler(click_handler)
self.players.index(player)
return self.players.index(player)
def get_player_index(self, player):
    """Given a player return that player's index.

    Bug fix: the index was computed but never returned, so the method
    always yielded None.  Raises ValueError when *player* is not in the
    list, matching list.index semantics.
    """
    return self.players.index(player)
bits = next(31)
bits = self.next(31)
def nextInt(self, n = None): """ Return a random int in [0, `n`).
return bool(next(1))
return bool(self.next(1))
def nextBoolean(self): """ Return a random bool. """
return next(24) / float(1 << 24)
return self.next(24) / float(1 << 24)
def nextFloat(self): """ Return a random float in (0, 1).
return ((next(26) << 27) + next(27)) / float(1 << 53)
return ((self.next(26) << 27) + self.next(27)) / float(1 << 53)
def nextDouble(self): """ Return a random float in (0, 1). """
retval = (self.next(32) << 32) + self.next(32) if retval & (1 << 63): retval -= (1 << 64) return retval
return (self.next(32) << 32) + self.next(32)
def nextLong(self): """ Return a random long.
haveEvents = firstEvent != u''
haveEvents = (firstEvent.id != u'')
def _getPartyStatus(self, now, nextEvents): firstEvent = self.context.getFirstEvent(self.request) lastEvent = self.context.getLastEvent(self.request)
clshideUntil = datetime.timedelta(seconds=config.defaultHideTime)
cls.hideUntil = datetime.timedelta(seconds=config.defaultHideTime)
def _initialize(config):
    """Prime the NoneLocation singleton class with defaults from *config*.

    Bug fix: ``clshideUntil`` (a typo for ``cls.hideUntil``) created a
    throwaway local variable instead of setting the attribute on the
    class, so the configured hide time was silently ignored.
    """
    cls = NoneLocation_cls
    cls.priority = config.defaultLocationPriority
    cls.hideUntil = datetime.timedelta(seconds=config.defaultHideTime)
self._gen = scheduler.run()
self._gen = self._scheduler.run()
def reinit (self) :
    """Restart scheduler from 0

    Bug fix: the generator was rebuilt from the bare name ``scheduler``
    (NameError); it must come from ``self._scheduler``.
    """
    self._scheduler._tasks = list(self._initial_state) #TODO hack pabo
    self._current_step = 0
    self._gen = self._scheduler.run()
    if self._init_func is not None :
        self._init_func()
import copy return (copy.copy(inputs[0]), )
return (dict(inputs[0]), )
def __call__(self, inputs): """ inputs is the list of input values """ import copy return (copy.copy(inputs[0]), )
while (not loc_mask[bb].any() ) and (bb[ind] < imax) :
while (bb[ind] < imax) and (not loc_mask[bb].any() ) :
def bounding_box (mask) :
    """Compute the bounding box of a mask

    :Parameters:
     - `mask` (array of bool) - a nd array of booleans

    :Returns: a slice (ind_min,ind_max) for each dimension of the mask
      or None if the mask do not contain any True value.
      Where ind_min correspond to the first slice that contains a True
      value and ind_max correspond to the first slice that contains only
      False after slices that contain at least one True value

    :Returns Type: list of (int,int)

    Bug fixes:
     - the bounds test now runs *before* indexing; the old order indexed
       the array with an out-of-range position when a True run touched
       the end of an axis (or the axis was all False), raising IndexError
       instead of terminating/returning None.
     - the mixed int/slice index list is converted to a tuple, as
       required for multi-axis indexing by modern numpy.
    """
    bb = [slice(0, m) for m in mask.shape]
    for ind in range(len(mask.shape)):
        # find bounding box along ind axis
        imax = mask.shape[ind]
        # find imin: first slab along this axis containing a True
        bb[ind] = 0
        while (bb[ind] < imax) and (not mask[tuple(bb)].any()):
            bb[ind] += 1
        if bb[ind] == imax:
            return None
        bbimin = bb[ind]
        # find imax: first all-False slab after the True run
        bb[ind] += 1
        while (bb[ind] < imax) and mask[tuple(bb)].any():
            bb[ind] += 1
        bbimax = bb[ind]
        # restore slice
        bb[ind] = slice(bbimin, bbimax)
    return bb
while mask[bb].any() and (bb[ind] < imax) :
while (bb[ind] < imax) and mask[bb].any() :
def bounding_box (mask) : """Compute the bounding box of a mask :Parameters: - `mask` (array of bool) - a nd array of booleans :Returns: a slice (ind_min,ind_max) for each dimension of the mask or None if the mask do not contain any True value. Where ind_min correspond to the first slice that contains a True value and ind_max correspond to the first slice that contains only False after slices that contain at least one True value :Returns Type: list of (int,int) """ loc_mask = mask bb = [slice(0,m) for m in mask.shape] for ind in range(len(mask.shape) ) : #find bounding box along ind axis imax = mask.shape[ind] #find imin bb[ind] = 0 while (not loc_mask[bb].any() ) and (bb[ind] < imax) : bb[ind] += 1 if bb[ind] == imax : return None bbimin = bb[ind] #find imax bb[ind] += 1 while mask[bb].any() and (bb[ind] < imax) : bb[ind] += 1 bbimax = bb[ind] #restore slice bb[ind] = slice(bbimin,bbimax) return bb
def flatten (img_list) :
def flatten (img_list, alpha = False) :
def flatten (img_list, alpha = False) :
    """Concatenate all images into a single image

    Use alpha to blend images one on top of each other

    .. warning:: all images must have the same shape either 2D or 3D and
       every layer after the first must carry an alpha channel

    :Parameters:
     - `img_list` (list of NxM(xP)x4 array of uint8)
     - `alpha` (bool) - if True the result keeps an alpha channel equal
       to the max of all input alpha channels; the default False
       preserves the historical RGB-only output

    :Returns Type: NxM(xP)x3(4) array of uint8
    """
    bg = img_list[0]
    R = bg[...,0]
    G = bg[...,1]
    B = bg[...,2]
    # collect alpha channels so the optional output alpha can be built;
    # the first image may legitimately lack one
    if bg.shape[-1] == 4 :
        alpha_list = [bg[...,3]]
    else :
        alpha_list = []
    for lay in img_list[1:] :
        A = lay[...,3]
        alpha_list.append(A)
        # standard "over" compositing with normalized alpha
        A = A / 255.
        iA = 1. - A
        R = R * iA + lay[...,0] * A
        G = G * iA + lay[...,1] * A
        B = B * iA + lay[...,2] * A
    if alpha :
        A = array(alpha_list).max(axis = 0)
        return rollaxis(array([R,G,B,A],bg.dtype),0,len(bg.shape) )
    return rollaxis(array([R,G,B],bg.dtype),0,len(bg.shape) )
.. warning:: all images must have the same shape either 2D or 3D and an alpha channel .. warning:: the resulting image has no alpha channel
.. warning:: all images must have the same nD shape and an alpha channel (except maybe for the first one) If alpha is True, the resulting image will use the max of all alpha channels as an alpha channel. .. warning:: if the first image is a SpatialImage, the resulting image will also be a SpatialImage but no test is made to ensure consistency in the resolution of the layers
def flatten (img_list) : """Concatenate all images into a single image Use alpha to blend images one on top of each other .. warning:: all images must have the same shape either 2D or 3D and an alpha channel .. warning:: the resulting image has no alpha channel :Parameters: - `img_list` (list of NxM(xP)x4 array of uint8) :Returns Type: NxM(xP)x3 array of uint8 """ R = img_list[0][...,0] G = img_list[0][...,1] B = img_list[0][...,2] for lay in img_list[1:] : alpha = lay[...,3] / 255. ialpha = 1. - alpha R = R * ialpha + lay[...,0] * alpha G = G * ialpha + lay[...,1] * alpha B = B * ialpha + lay[...,2] * alpha return rollaxis(array([R,G,B],img_list[0].dtype),0,len(img_list[0].shape) )
:Returns Type: NxM(xP)x3 array of uint8 """ R = img_list[0][...,0] G = img_list[0][...,1] B = img_list[0][...,2]
- `alpha` (bool) - the resulting image will have an alpha channel or not :Returns Type: NxM(xP)x3(4) array of uint8 """ bg = img_list[0] R = bg[...,0] G = bg[...,1] B = bg[...,2] if bg.shape[-1] == 4 : alpha_list = [bg[...,3] ] else : alpha_list = []
def flatten (img_list) : """Concatenate all images into a single image Use alpha to blend images one on top of each other .. warning:: all images must have the same shape either 2D or 3D and an alpha channel .. warning:: the resulting image has no alpha channel :Parameters: - `img_list` (list of NxM(xP)x4 array of uint8) :Returns Type: NxM(xP)x3 array of uint8 """ R = img_list[0][...,0] G = img_list[0][...,1] B = img_list[0][...,2] for lay in img_list[1:] : alpha = lay[...,3] / 255. ialpha = 1. - alpha R = R * ialpha + lay[...,0] * alpha G = G * ialpha + lay[...,1] * alpha B = B * ialpha + lay[...,2] * alpha return rollaxis(array([R,G,B],img_list[0].dtype),0,len(img_list[0].shape) )
alpha = lay[...,3] / 255. ialpha = 1. - alpha R = R * ialpha + lay[...,0] * alpha G = G * ialpha + lay[...,1] * alpha B = B * ialpha + lay[...,2] * alpha return rollaxis(array([R,G,B],img_list[0].dtype),0,len(img_list[0].shape) )
A = lay[...,3] alpha_list.append(A) A = A / 255. iA = 1. - A R = R * iA + lay[...,0] * A G = G * iA + lay[...,1] * A B = B * iA + lay[...,2] * A if alpha : A = array(alpha_list).max(axis = 0) ret = rollaxis(array([R,G,B,A],bg.dtype),0,len(bg.shape) ) else : ret = rollaxis(array([R,G,B],bg.dtype),0,len(bg.shape) ) if isinstance(bg,SpatialImage) : return SpatialImage(ret,bg.resolution,4,bg.info) else : return ret
def flatten (img_list) : """Concatenate all images into a single image Use alpha to blend images one on top of each other .. warning:: all images must have the same shape either 2D or 3D and an alpha channel .. warning:: the resulting image has no alpha channel :Parameters: - `img_list` (list of NxM(xP)x4 array of uint8) :Returns Type: NxM(xP)x3 array of uint8 """ R = img_list[0][...,0] G = img_list[0][...,1] B = img_list[0][...,2] for lay in img_list[1:] : alpha = lay[...,3] / 255. ialpha = 1. - alpha R = R * ialpha + lay[...,0] * alpha G = G * ialpha + lay[...,1] * alpha B = B * ialpha + lay[...,2] * alpha return rollaxis(array([R,G,B],img_list[0].dtype),0,len(img_list[0].shape) )
if instance and name:
if instance is not None and name is not None:
def set_method_name(self, name):
    """Bind the method called *name* on the object wired to input port 0:
    captures its signature, updates the node caption/docstring and
    rebuilds the input ports.

    Bug fix: the guard used truthiness (``if instance and name``), so a
    perfectly valid but "falsy" instance (0, '', empty container)
    silently skipped the binding; test against None explicitly.
    """
    instance = self.get_input(0)
    if instance is not None and name is not None:
        meth = getattr(instance, name, None)
        if meth:
            sig = Signature(meth)
            inputs = sig.get_all_parameters()
            # caption: short "instance : method" label, elided to ~13 chars
            prefix = str(instance)
            if len(prefix) > 15:
                prefix = prefix[:5] + "..." + prefix[-5:]
            self.set_caption(prefix + " : " + name)
            self.internal_data["methodName"] = name
            self.internal_data["methodSig"] = sig
            self.__doc__ = sig.get_doc()
            self.build_ports(inputs)
prefix = str(instance)
prefix = type(instance).__name__ if not hasattr(instance, "__name__") \ else instance.__name__
def set_method_name(self, name): instance = self.get_input(0) if instance and name: meth = getattr(instance, name, None) if meth: sig = Signature(meth) inputs = sig.get_all_parameters() prefix = str(instance) if len(prefix)>15: prefix = prefix[:5]+"..."+prefix[-5:] self.set_caption(prefix+" : "+name) self.internal_data["methodName"] = name self.internal_data["methodSig"] = sig self.__doc__ = sig.get_doc() self.build_ports(inputs)
self._transform = self._transform * transfo
self._transform = transfo * self._transform
def transform (self, transfo) :
    """Combine a transformation with the actual transformation of this element.

    Bug fix: matrix composition is not commutative; the new
    transformation must be applied *after* the current one, i.e.
    left-multiplied (``transfo * current``).  The operands were
    reversed.

    :Parameters:
     - `transfo` (:class:`SVGTransform`)
    """
    self._transform = transfo * self._transform
self._transform = self._transform * translation(dx,dy)
self._transform = translation(dx,dy) * self._transform
def translate (self, dx, dy) :
    """Combine a translation with the actual transformation of this element.

    Bug fix: the translation must be composed on the left of the current
    transform (applied after it); the multiplication order was reversed,
    which is wrong for non-commutative transform composition.

    :Parameters:
     - `dx` (float) - x displacement
     - `dy` (float) - y displacement
    """
    self._transform = translation(dx,dy) * self._transform
self._transform = self._transform * rotation(angle)
self._transform = rotation(angle) * self._transform
def rotate (self, angle) :
    """Combine a rotation with the actual transformation of this element.

    Bug fix: the rotation must be composed on the left of the current
    transform (applied after it); the multiplication order was reversed.

    :Parameters:
     - `angle` (float) - angle of the rotation around Oz in direct
       orientation.
    """
    self._transform = rotation(angle) * self._transform
self._transform = self._transform * scaling(sx,sy)
self._transform = scaling(sx,sy) * self._transform
def scale (self, sx, sy) :
    """Combine a scaling with the actual transformation of this element.

    Bug fix: the scaling must be composed on the left of the current
    transform (applied after it); the multiplication order was reversed.

    :Parameters:
     - `sx` (float) - x scaling
     - `sy` (float) - y scaling
    """
    self._transform = scaling(sx,sy) * self._transform
urllib.urlretrieve( "http://gforge.inria.fr/plugins/scmsvn/viewcvs.php/*checkout*/trunk/deploygui/src/openalea/deploygui/auth.py?root=openalea",
urllib.urlretrieve( "http://gforge.inria.fr/scm/viewcvs.php/*checkout*/trunk/deploygui/src/openalea/deploygui/auth.py?root=openalea",
def _validate_md5(egg_name, data):
    """Verify *data* against the known md5 digest for *egg_name*.

    Exits the process with status 2 on a digest mismatch; returns *data*
    unchanged otherwise.  Eggs with no entry in md5_data are accepted
    without checking.  (Python 2 print syntax, matching the file.)
    """
    if egg_name in md5_data:
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >> sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data
self.add_input(name="legend", interface=IBool, value=True) self.add_input(name="show", interface=IBool, value=True) self.add_input(name="figure", interface=IDict, value={"num":1}) self.add_input(name="axes", interface=IDict, value={})
self.add_input(name="legend", interface=IDict, value={'legend on':True}) self.add_input(name="show", interface=IBool, value=True) self.add_input(name="figure", interface=IDict, value={"num":1}) self.add_input(name="axes", interface=IDict, value={})
def __init__(self): """init docstring""" from pylab import plot Node.__init__(self) YLabel.__init__(self) XLabel.__init__(self) Title.__init__(self) #self.__doc__+=plot.__doc__
figure(**self.get_input("figure"))
fig = figure(**self.get_input("figure"))
def __call__(self, inputs): from pylab import figure, plot, show, clf, xlabel, ylabel, hold, title, grid, Line2D, legend, axes xinputs = self.get_input("x") yinputs = self.get_input("y") clf() #figure(**self.get_input('figure')) kwds = {} kwds['markersize']=self.get_input("markersize") kwds['marker']=markers[self.get_input("marker")] kwds['linestyle']=linestyles[self.get_input("linestyle")] kwds['color']=colors[self.get_input("color")] kwds['label']=self.get_input("label") print self.get_input("axes") #kwds['figure'] = figure(**self.get_input("figure")) figure(**self.get_input("figure")) #kwds['axes'] = axes(**self.get_input("axes")) axes(**self.get_input("axes")) print kwds #kwds['axes']=self.get_input("axes") #print self.get_input("axes")
line2dkwds = get_kwds_from_line2d(x, **kwds)
line2dkwds = get_kwds_from_line2d(x, kwds)
def __call__(self, inputs): from pylab import figure, plot, show, clf, xlabel, ylabel, hold, title, grid, Line2D, legend, axes xinputs = self.get_input("x") yinputs = self.get_input("y") clf() #figure(**self.get_input('figure')) kwds = {} kwds['markersize']=self.get_input("markersize") kwds['marker']=markers[self.get_input("marker")] kwds['linestyle']=linestyles[self.get_input("linestyle")] kwds['color']=colors[self.get_input("color")] kwds['label']=self.get_input("label") print self.get_input("axes") #kwds['figure'] = figure(**self.get_input("figure")) figure(**self.get_input("figure")) #kwds['axes'] = axes(**self.get_input("axes")) axes(**self.get_input("axes")) print kwds #kwds['axes']=self.get_input("axes") #print self.get_input("axes")
if self.get_input('legend') is True: legend(loc='best')
print self.get_input("legend").keys() if self.get_input("legend")['legend on']==True: mykwds = self.get_input("legend") del mykwds['legend on'] legend(**mykwds)
def __call__(self, inputs): from pylab import figure, plot, show, clf, xlabel, ylabel, hold, title, grid, Line2D, legend, axes xinputs = self.get_input("x") yinputs = self.get_input("y") clf() #figure(**self.get_input('figure')) kwds = {} kwds['markersize']=self.get_input("markersize") kwds['marker']=markers[self.get_input("marker")] kwds['linestyle']=linestyles[self.get_input("linestyle")] kwds['color']=colors[self.get_input("color")] kwds['label']=self.get_input("label") print self.get_input("axes") #kwds['figure'] = figure(**self.get_input("figure")) figure(**self.get_input("figure")) #kwds['axes'] = axes(**self.get_input("axes")) axes(**self.get_input("axes")) print kwds #kwds['axes']=self.get_input("axes") #print self.get_input("axes")
dummy=[] return (dummy, )
return (fig, )
def __call__(self, inputs): from pylab import figure, plot, show, clf, xlabel, ylabel, hold, title, grid, Line2D, legend, axes xinputs = self.get_input("x") yinputs = self.get_input("y") clf() #figure(**self.get_input('figure')) kwds = {} kwds['markersize']=self.get_input("markersize") kwds['marker']=markers[self.get_input("marker")] kwds['linestyle']=linestyles[self.get_input("linestyle")] kwds['color']=colors[self.get_input("color")] kwds['label']=self.get_input("label") print self.get_input("axes") #kwds['figure'] = figure(**self.get_input("figure")) figure(**self.get_input("figure")) #kwds['axes'] = axes(**self.get_input("axes")) axes(**self.get_input("axes")) print kwds #kwds['axes']=self.get_input("axes") #print self.get_input("axes")
def __init__(self): Node.__init__(self) self.add_input(name="legend on", interface=IBool, value=False) self.add_input(name="legend", interface=IStr, value=None)
location = {'best':0, 'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4, 'right': 5, 'center left': 6, 'center right': 7, 'lower center': 8, 'upper center': 9, 'center': 10} def __init__(self): Node.__init__(self) self.add_input(name="legend on", interface=IBool, value=True) self.add_input(name="shadow", interface=IBool, value=False) self.add_input(name="location", interface=IEnumStr(locations.keys()), value=0) self.add_input(name="numpoints", interface=IInt, value=2) self.add_input(name="markerscale", interface=IFloat(0.1,10,0.1), value=1) self.add_input(name="fancybox", interface=IBool, value=True) self.add_input(name="ncol", interface=IInt(1,10), value=1) self.add_input(name="mode", interface=IEnumStr({'None':'None','Expanded':'exapanded'}), value=None) self.add_input(name="title", interface=IStr, value=None) self.add_input(name="properties to be done", interface=IDict, value=None) self.add_output(name="kwds", interface=IDict, value={})
def __init__(self): Node.__init__(self) self.add_input(name="legend on", interface=IBool, value=False) self.add_input(name="legend", interface=IStr, value=None)
if self.get_input('legend') is None: legend(loc='best') else: legend(self.get_input('legend'), loc='best')
kwds = {} kwds['legend on'] = self.get_input('legend on') kwds['loc'] = self.get_input('location') kwds['numpoints'] = self.get_input('numpoints') kwds['loc'] = self.get_input('location') kwds['fancybox'] = self.get_input('fancybox') kwds['markerscale'] = self.get_input('markerscale') kwds['shadow'] = self.get_input('shadow') kwds['ncol'] = self.get_input('ncol') kwds['mode'] = self.get_input('mode') kwds['title'] = self.get_input('title') return kwds
def __call__(self, inputs):
    """Draw a pylab legend at the best location; when the 'legend' input
    supplies labels, pass them through explicitly."""
    from pylab import legend
    if self.get_input('legend') is None:
        args = []
    else:
        args = [self.get_input('legend')]
    legend(*args, loc='best')
self.add_output(name="figure")
def __init__(self): #from pylab import figure #self.__doc__+=figure.__doc__ Node.__init__(self) self.add_input(name="num", interface=IInt, value=1) self.add_input(name="figsize", interface=ITuple3, value=(8, 6)) self.add_input(name="dpi", interface=IFloat, value=80.) self.add_input(name="facecolor", interface=IEnumStr(colors.keys()), value='white') self.add_input(name="edgecolor", interface=IEnumStr(colors.keys()), value='black')
fig = figure(num=self.get_input('num'), figsize=self.get_input('figsize'), dpi=self.get_input('dpi'), facecolor=self.get_input('facecolor'), edgecolor=self.get_input('edgecolor')) return fig,kwds
return kwds
def __call__(self, inputs): from pylab import figure kwds={} kwds['num']=self.get_input('num') kwds['figsize']=self.get_input('figsize') kwds['facecolor']=self.get_input('facecolor') kwds['edgecolor']=self.get_input('edgecolor') kwds['dpi']=self.get_input('dpi')
revision = __revision__.strip().split() if len(revision) > 2: revision = revision[2] else: revision = '0.8' print "Running ez_alea_setup version %s" % revision
try: revision = __revision__.strip().split() if len(revision) > 2: revision = revision[2] else: revision = '0.8' print "Running ez_alea_setup version %s" % revision except: pass
def welcome_setup(): revision = __revision__.strip().split() if len(revision) > 2: revision = revision[2] else: revision = '0.8' print "Running ez_alea_setup version %s" % revision print """
print "Running ez_alea_setup version %s" % __revision__.split()[2]
revision = __revision__.strip().split() if len(revision) > 2: revision = revision[2] else: revision = '0.8' print "Running ez_alea_setup version %s" % revision
def welcome_setup(): print "Running ez_alea_setup version %s" % __revision__.split()[2] print """
tcolor = "yellow" if status == 'A': tcolor = "green"
tcolor = '' if status == 'U': tcolor = 'orange' tag_end += '<img src="/site_media/images/orange_flag.png" width="16" height="16" alt="orange_flag.png">'
def flag_policy_issue(value, status): # to highlight the issues tag_start = '<span class="' tag_mid = '">' tag_end = '</span>' tcolor = "yellow" if status == 'A': tcolor = "green" if status == 'D': tcolor = "red" tag_end += '<img src="/site_media/images/red_flag.png" width="16" height="16" alt="red_flag.png">' value = tag_start + tcolor + tag_mid + value + tag_end return value
tcolor = "red"
tcolor = 'red'
def flag_policy_issue(value, status): # to highlight the issues tag_start = '<span class="' tag_mid = '">' tag_end = '</span>' tcolor = "yellow" if status == 'A': tcolor = "green" if status == 'D': tcolor = "red" tag_end += '<img src="/site_media/images/red_flag.png" width="16" height="16" alt="red_flag.png">' value = tag_start + tcolor + tag_mid + value + tag_end return value
pllicense = llicenseset[0].license
pllicense = llicenseset[0].license if llicense != pllicense: llicense = llicense + ' (' + pllicense + ')'
def check_policy(flicense, llicense, library, static, issue): # is the lib dynamic or static? ltype = 'Dynamic' if static: ltype = 'Static' # it's possible that the license assigned to the target or library is one of # the aliases, in which case we need the 'official' name for the policy check pllicense = llicense # we want to display both names in the report, if present pflicense = flicense llicenseset = Aliases.objects.filter(alias = llicense) if llicenseset: # can only be one match pllicense = llicenseset[0].license flicenseset = Aliases.objects.filter(alias = flicense) if flicenseset: # can only be one match pflicense = flicenseset[0].license policyset = Policy.objects.filter(tlicense = pflicense, dlicense = pllicense) policyset = policyset.filter(Q(relationship = ltype) | Q(relationship = 'Both')) # if we got multiple matches, just return - bad policies if policyset and policyset.count() < 2: status = policyset[0].status # only set the issue flag for the target coloring for the disallowed case if status == 'D': issue = issue or True if llicense != pllicense: # plug in the alias (real name) llicense = llicense + ' (' + pllicense + ')' llicense = flag_policy_issue(llicense, status) if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')' # modify the target when there's been a problem somewhere in the whole license set if issue: flicense = flag_policy_issue(flicense, 'D') return issue, llicense, flicense
pflicense = flicenseset[0].license
pflicense = flicenseset[0].license if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')'
def check_policy(flicense, llicense, library, static, issue): # is the lib dynamic or static? ltype = 'Dynamic' if static: ltype = 'Static' # it's possible that the license assigned to the target or library is one of # the aliases, in which case we need the 'official' name for the policy check pllicense = llicense # we want to display both names in the report, if present pflicense = flicense llicenseset = Aliases.objects.filter(alias = llicense) if llicenseset: # can only be one match pllicense = llicenseset[0].license flicenseset = Aliases.objects.filter(alias = flicense) if flicenseset: # can only be one match pflicense = flicenseset[0].license policyset = Policy.objects.filter(tlicense = pflicense, dlicense = pllicense) policyset = policyset.filter(Q(relationship = ltype) | Q(relationship = 'Both')) # if we got multiple matches, just return - bad policies if policyset and policyset.count() < 2: status = policyset[0].status # only set the issue flag for the target coloring for the disallowed case if status == 'D': issue = issue or True if llicense != pllicense: # plug in the alias (real name) llicense = llicense + ' (' + pllicense + ')' llicense = flag_policy_issue(llicense, status) if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')' # modify the target when there's been a problem somewhere in the whole license set if issue: flicense = flag_policy_issue(flicense, 'D') return issue, llicense, flicense
if llicense != pllicense: llicense = llicense + ' (' + pllicense + ')'
def check_policy(flicense, llicense, library, static, issue): # is the lib dynamic or static? ltype = 'Dynamic' if static: ltype = 'Static' # it's possible that the license assigned to the target or library is one of # the aliases, in which case we need the 'official' name for the policy check pllicense = llicense # we want to display both names in the report, if present pflicense = flicense llicenseset = Aliases.objects.filter(alias = llicense) if llicenseset: # can only be one match pllicense = llicenseset[0].license flicenseset = Aliases.objects.filter(alias = flicense) if flicenseset: # can only be one match pflicense = flicenseset[0].license policyset = Policy.objects.filter(tlicense = pflicense, dlicense = pllicense) policyset = policyset.filter(Q(relationship = ltype) | Q(relationship = 'Both')) # if we got multiple matches, just return - bad policies if policyset and policyset.count() < 2: status = policyset[0].status # only set the issue flag for the target coloring for the disallowed case if status == 'D': issue = issue or True if llicense != pllicense: # plug in the alias (real name) llicense = llicense + ' (' + pllicense + ')' llicense = flag_policy_issue(llicense, status) if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')' # modify the target when there's been a problem somewhere in the whole license set if issue: flicense = flag_policy_issue(flicense, 'D') return issue, llicense, flicense
if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')'
if not policyset and flicense != 'TBD': llicense = flag_policy_issue(llicense, 'U')
def check_policy(flicense, llicense, library, static, issue): # is the lib dynamic or static? ltype = 'Dynamic' if static: ltype = 'Static' # it's possible that the license assigned to the target or library is one of # the aliases, in which case we need the 'official' name for the policy check pllicense = llicense # we want to display both names in the report, if present pflicense = flicense llicenseset = Aliases.objects.filter(alias = llicense) if llicenseset: # can only be one match pllicense = llicenseset[0].license flicenseset = Aliases.objects.filter(alias = flicense) if flicenseset: # can only be one match pflicense = flicenseset[0].license policyset = Policy.objects.filter(tlicense = pflicense, dlicense = pllicense) policyset = policyset.filter(Q(relationship = ltype) | Q(relationship = 'Both')) # if we got multiple matches, just return - bad policies if policyset and policyset.count() < 2: status = policyset[0].status # only set the issue flag for the target coloring for the disallowed case if status == 'D': issue = issue or True if llicense != pllicense: # plug in the alias (real name) llicense = llicense + ' (' + pllicense + ')' llicense = flag_policy_issue(llicense, status) if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')' # modify the target when there's been a problem somewhere in the whole license set if issue: flicense = flag_policy_issue(flicense, 'D') return issue, llicense, flicense
tag_start = '<font color="'
tag_start = '<span class="'
def flag_policy_issue(value, status): # to highlight the issues tag_start = '<font color="' tag_mid = '">' tag_end = '</font>' tcolor = "yellow" if status == 'A': tcolor = "green" if status == 'D': tcolor = "red" tag_end += '<img src="/site_media/images/red_flag.png" width="16" height="16" alt="red_flag.png">' value = tag_start + tcolor + tag_mid + value + tag_end return value
tag_end = '</font>'
tag_end = '</span>'
def flag_policy_issue(value, status): # to highlight the issues tag_start = '<font color="' tag_mid = '">' tag_end = '</font>' tcolor = "yellow" if status == 'A': tcolor = "green" if status == 'D': tcolor = "red" tag_end += '<img src="/site_media/images/red_flag.png" width="16" height="16" alt="red_flag.png">' value = tag_start + tcolor + tag_mid + value + tag_end return value
target = dep_path(parent, dep) childdeps = deps_check(target)
if not re.search('(static)', dep): target = dep_path(parent, dep) childdeps = deps_check(target) else: childdeps = []
def dep_loop(parent, soname, dep, level): if level > depth: return if level == 1: print_path_dep(parent, soname, dep, level) print_dep(dep, level) else: print_path_dep(parent, soname, dep, level) print_dep(dep, level) target = dep_path(parent, dep) childdeps = deps_check(target) if len(childdeps) > 0: for childdep in childdeps: dep_loop(target, dep, childdep, level + 1)
for lib in libs: if lib not in staticlib_list: staticlib_list.append(lib)
if len(libs) == 1: if libs[0] not in staticlib_list: staticlib_list.append(libs[0]) elif len(libs) > 1: staticlib_multiples[func] = libs for func in staticlib_multiples: found = False for lib in staticlib_multiples[func]: if lib in staticlib_list: found = True break if not found: staticlib_list.append(staticlib_multiples[func][0])
def static_deps_check(target): "Look for statically linked dependencies." # State enumeration for debug parser. FIND_NEXT = 1 FIND_NAME = 2 # The algorithm here is pretty basic. We grab a complete symbol list # and debug information. Any symbols that aren't covered by debug # information are considered to be source from static libraries. # Read the functions from the symbol list. symlist = [ x.split() for x in os.popen("readelf -s " + target) ] symlist = [ x for x in symlist if len(x) == 8 ] sym_funcs = set([ x[7] for x in symlist if x[3] == "FUNC" ]) # Read the functions from the debug information. debuginfo = os.popen("readelf -wi " + target) debug_funcs = set() debugstate = FIND_NEXT for line in debuginfo: if len(line) < 2: continue if debugstate == FIND_NAME: if line[1] == "<": debugstate = FIND_NEXT else: match = re.match(r'\s+<.+>\s+(.+?)\s+:\s+\(.+\):\s+(.+)$', line) if match: (field, value) = match.group(1, 2) if field == "DW_AT_name": debug_funcs.add(value.strip()) debugstate = FIND_NEXT if debugstate == FIND_NEXT and line[1] == "<": match = re.search(r'\((.+)\)$', line) if match and match.group(1) == "DW_TAG_subprogram": found_name = None debugstate = FIND_NAME # Get the functions in the symbol list that have no debug info. staticsym_funcs = sym_funcs - debug_funcs # For each function, figure out where it came from. staticlib_list = [] for func in staticsym_funcs: libs = find_static_library(func) for lib in libs: if lib not in staticlib_list: staticlib_list.append(lib) # Format and return the list. staticlib_list.sort() staticlib_results = [ x + " (static)" for x in staticlib_list ] return staticlib_results
dep = dep.replace("[","") dep = dep.replace("]","")
dep = dep.strip("[]")
def deps_check(target): deps = [] # run the "file" command and see if it's ELF filetype = os.popen("file " + target).read() if re.search("ELF", filetype): if not re.search("statically linked", filetype): elfcall = "readelf -d " + target for elfdata in os.popen(elfcall).readlines(): # lines we want all have "NEEDED" if re.search("NEEDED", elfdata): # library is the 5th field dep = string.split(elfdata)[4] dep = dep.replace("[","") dep = dep.replace("]","") deps.append(dep) if do_static: deps.extend(static_deps_check(target)) else: raise NotELFError, "not an ELF file" return deps
def deps_print(title, parent, target, level, deps):
def print_deps(target, deps):
def deps_print(title, parent, target, level, deps): csvstring = '' spacer = '' nospace = '' if level > 0: nospace += spacer for space in range(0, level): spacer += " " if len(deps) < 1: # FIXME - this blows up the recursion, just drop it? #deps.append("NONE") # at ld-linux - just suppress the output return if do_csv: if depth == 1: csvstring += str(level + 1) + "," + target else: csvstring += str(level + 1) + "," + target + "," + title if level > 0 or depth < 2: for dep in deps: csvstring += "," + dep print csvstring else: if level == 1: print spacer + title print spacer + "[" + str(level + 1) + "]" + target + ":" spacer += " " if level > 0 or depth < 2: for dep in deps: print spacer + dep
nospace = '' if level > 0: nospace += spacer for space in range(0, level):
if len(deps) < 1: return if do_csv: csvstring += str(1) + "," + target for dep in deps: csvstring += "," + dep print csvstring else: print spacer + "[" + str(1) + "]" + target + ":"
def deps_print(title, parent, target, level, deps): csvstring = '' spacer = '' nospace = '' if level > 0: nospace += spacer for space in range(0, level): spacer += " " if len(deps) < 1: # FIXME - this blows up the recursion, just drop it? #deps.append("NONE") # at ld-linux - just suppress the output return if do_csv: if depth == 1: csvstring += str(level + 1) + "," + target else: csvstring += str(level + 1) + "," + target + "," + title if level > 0 or depth < 2: for dep in deps: csvstring += "," + dep print csvstring else: if level == 1: print spacer + title print spacer + "[" + str(level + 1) + "]" + target + ":" spacer += " " if level > 0 or depth < 2: for dep in deps: print spacer + dep
if len(deps) < 1:
for dep in deps: print spacer + dep def print_dep(dep, indent): spacer = 2 * indent * " " if not do_csv: print spacer + dep def print_path_dep(parent, soname, dep, indent): csvstring = '' spacer = (indent - 1) * " " token = "[" + str(indent) + "]" if not do_csv: print spacer + token + parent + ":" else: csvstring += str(indent) + "," + parent + "," if indent != 1 and soname: csvstring += soname + "," csvstring += dep print csvstring def dep_loop(parent, soname, dep, level): if level > depth:
def deps_print(title, parent, target, level, deps): csvstring = '' spacer = '' nospace = '' if level > 0: nospace += spacer for space in range(0, level): spacer += " " if len(deps) < 1: # FIXME - this blows up the recursion, just drop it? #deps.append("NONE") # at ld-linux - just suppress the output return if do_csv: if depth == 1: csvstring += str(level + 1) + "," + target else: csvstring += str(level + 1) + "," + target + "," + title if level > 0 or depth < 2: for dep in deps: csvstring += "," + dep print csvstring else: if level == 1: print spacer + title print spacer + "[" + str(level + 1) + "]" + target + ":" spacer += " " if level > 0 or depth < 2: for dep in deps: print spacer + dep
if do_csv: if depth == 1: csvstring += str(level + 1) + "," + target else: csvstring += str(level + 1) + "," + target + "," + title if level > 0 or depth < 2: for dep in deps: csvstring += "," + dep print csvstring else: if level == 1: print spacer + title print spacer + "[" + str(level + 1) + "]" + target + ":" spacer += " " if level > 0 or depth < 2: for dep in deps: print spacer + dep def print_parent(parent, dep): if not do_csv: print '[1]' + parent + ":" else: print '1,' + parent + "," + dep def dep_loop(parent, deps): for dep in deps: print_parent(parent, dep) if dep != "STATIC": childparent = parent target = dep_path(childparent,dep) for level in range(1, depth): childdeps = deps_check(target) deps_print(dep, childparent, target, level, childdeps) if len(childdeps) > 0: childparent = target target = dep_path(childparent,childdeps[0]) else: break
if level == 1: print_path_dep(parent, soname, dep, level) print_dep(dep, level) else: print_path_dep(parent, soname, dep, level) print_dep(dep, level) target = dep_path(parent, dep) childdeps = deps_check(target) if len(childdeps) > 0: for childdep in childdeps: dep_loop(target, dep, childdep, level + 1)
def deps_print(title, parent, target, level, deps): csvstring = '' spacer = '' nospace = '' if level > 0: nospace += spacer for space in range(0, level): spacer += " " if len(deps) < 1: # FIXME - this blows up the recursion, just drop it? #deps.append("NONE") # at ld-linux - just suppress the output return if do_csv: if depth == 1: csvstring += str(level + 1) + "," + target else: csvstring += str(level + 1) + "," + target + "," + title if level > 0 or depth < 2: for dep in deps: csvstring += "," + dep print csvstring else: if level == 1: print spacer + title print spacer + "[" + str(level + 1) + "]" + target + ":" spacer += " " if level > 0 or depth < 2: for dep in deps: print spacer + dep
deps_print(parent, parent, candidate, 0, deps)
print_deps(candidate, deps)
def main(): opt_parser = optparse.OptionParser(usage=usage_line, version="%prog version " + version, option_list=command_line_options) (options, args) = opt_parser.parse_args() if len(args) == 0 or len(args) > 2: opt_parser.error("improper number of non-option arguments") # prog_ndx_start is the offset in argv for the file/dir and recursion prog_ndx_start = 1 found = 0 parent = "" global do_csv, depth, do_static do_static = options.do_static do_csv = options.do_csv if options.target: do_search = True target = options.target target_file = args[0] if not os.path.isdir(target): print target + " does not appear to be a directory..." sys.exit(1) else: do_search = False target = args[0] if not(os.path.isdir(target) or os.path.isfile(target)): print target + " does not appear to be a directory or file..." sys.exit(1) # sanity check on recursion level if len(args) == 1: depth = 1 else: try: recursion_arg = args[1] depth = int(recursion_arg) except: bad_depth() if depth < 1: bad_depth() if os.path.isdir(target): # walk the directory tree and find ELF files to process for path, dirs, files in os.walk(target): for filename in files: if (do_search and (filename == target_file)) or not(do_search): candidate = os.path.join(path, filename) if os.path.isfile(candidate): try: deps = deps_check(candidate) except NotELFError: deps = [] if len(deps) > 0: if depth == 1: deps_print(parent, parent, candidate, 0, deps) # do recursion if called for else: dep_loop(candidate, deps) if do_search and (filename == target_file): found = 1 break if do_search and not found: print target_file + " was not found in " + target + " ..." sys.exit(1) else: # single file, just check it and exit # top level deps parent = target try: deps = deps_check(target) except NotELFError: print "not an ELF file..." sys.exit(1) # FIXME: for now, if no deps were found, assume the file # is statically linked. Should rework the recursion so # we don't need this. 
if not deps: deps.append("STATIC") if depth == 1: deps_print(parent, parent, target, 0, deps) # do recursion if called for else: dep_loop(parent, deps) sys.exit(0)
dep_loop(candidate, deps)
for dep in deps: dep_loop(candidate, None, dep, 1)
def main(): opt_parser = optparse.OptionParser(usage=usage_line, version="%prog version " + version, option_list=command_line_options) (options, args) = opt_parser.parse_args() if len(args) == 0 or len(args) > 2: opt_parser.error("improper number of non-option arguments") # prog_ndx_start is the offset in argv for the file/dir and recursion prog_ndx_start = 1 found = 0 parent = "" global do_csv, depth, do_static do_static = options.do_static do_csv = options.do_csv if options.target: do_search = True target = options.target target_file = args[0] if not os.path.isdir(target): print target + " does not appear to be a directory..." sys.exit(1) else: do_search = False target = args[0] if not(os.path.isdir(target) or os.path.isfile(target)): print target + " does not appear to be a directory or file..." sys.exit(1) # sanity check on recursion level if len(args) == 1: depth = 1 else: try: recursion_arg = args[1] depth = int(recursion_arg) except: bad_depth() if depth < 1: bad_depth() if os.path.isdir(target): # walk the directory tree and find ELF files to process for path, dirs, files in os.walk(target): for filename in files: if (do_search and (filename == target_file)) or not(do_search): candidate = os.path.join(path, filename) if os.path.isfile(candidate): try: deps = deps_check(candidate) except NotELFError: deps = [] if len(deps) > 0: if depth == 1: deps_print(parent, parent, candidate, 0, deps) # do recursion if called for else: dep_loop(candidate, deps) if do_search and (filename == target_file): found = 1 break if do_search and not found: print target_file + " was not found in " + target + " ..." sys.exit(1) else: # single file, just check it and exit # top level deps parent = target try: deps = deps_check(target) except NotELFError: print "not an ELF file..." sys.exit(1) # FIXME: for now, if no deps were found, assume the file # is statically linked. Should rework the recursion so # we don't need this. 
if not deps: deps.append("STATIC") if depth == 1: deps_print(parent, parent, target, 0, deps) # do recursion if called for else: dep_loop(parent, deps) sys.exit(0)
if not deps: deps.append("STATIC")
def main(): opt_parser = optparse.OptionParser(usage=usage_line, version="%prog version " + version, option_list=command_line_options) (options, args) = opt_parser.parse_args() if len(args) == 0 or len(args) > 2: opt_parser.error("improper number of non-option arguments") # prog_ndx_start is the offset in argv for the file/dir and recursion prog_ndx_start = 1 found = 0 parent = "" global do_csv, depth, do_static do_static = options.do_static do_csv = options.do_csv if options.target: do_search = True target = options.target target_file = args[0] if not os.path.isdir(target): print target + " does not appear to be a directory..." sys.exit(1) else: do_search = False target = args[0] if not(os.path.isdir(target) or os.path.isfile(target)): print target + " does not appear to be a directory or file..." sys.exit(1) # sanity check on recursion level if len(args) == 1: depth = 1 else: try: recursion_arg = args[1] depth = int(recursion_arg) except: bad_depth() if depth < 1: bad_depth() if os.path.isdir(target): # walk the directory tree and find ELF files to process for path, dirs, files in os.walk(target): for filename in files: if (do_search and (filename == target_file)) or not(do_search): candidate = os.path.join(path, filename) if os.path.isfile(candidate): try: deps = deps_check(candidate) except NotELFError: deps = [] if len(deps) > 0: if depth == 1: deps_print(parent, parent, candidate, 0, deps) # do recursion if called for else: dep_loop(candidate, deps) if do_search and (filename == target_file): found = 1 break if do_search and not found: print target_file + " was not found in " + target + " ..." sys.exit(1) else: # single file, just check it and exit # top level deps parent = target try: deps = deps_check(target) except NotELFError: print "not an ELF file..." sys.exit(1) # FIXME: for now, if no deps were found, assume the file # is statically linked. Should rework the recursion so # we don't need this. 
if not deps: deps.append("STATIC") if depth == 1: deps_print(parent, parent, target, 0, deps) # do recursion if called for else: dep_loop(parent, deps) sys.exit(0)
deps_print(parent, parent, target, 0, deps)
print_deps(target, deps)
def main(): opt_parser = optparse.OptionParser(usage=usage_line, version="%prog version " + version, option_list=command_line_options) (options, args) = opt_parser.parse_args() if len(args) == 0 or len(args) > 2: opt_parser.error("improper number of non-option arguments") # prog_ndx_start is the offset in argv for the file/dir and recursion prog_ndx_start = 1 found = 0 parent = "" global do_csv, depth, do_static do_static = options.do_static do_csv = options.do_csv if options.target: do_search = True target = options.target target_file = args[0] if not os.path.isdir(target): print target + " does not appear to be a directory..." sys.exit(1) else: do_search = False target = args[0] if not(os.path.isdir(target) or os.path.isfile(target)): print target + " does not appear to be a directory or file..." sys.exit(1) # sanity check on recursion level if len(args) == 1: depth = 1 else: try: recursion_arg = args[1] depth = int(recursion_arg) except: bad_depth() if depth < 1: bad_depth() if os.path.isdir(target): # walk the directory tree and find ELF files to process for path, dirs, files in os.walk(target): for filename in files: if (do_search and (filename == target_file)) or not(do_search): candidate = os.path.join(path, filename) if os.path.isfile(candidate): try: deps = deps_check(candidate) except NotELFError: deps = [] if len(deps) > 0: if depth == 1: deps_print(parent, parent, candidate, 0, deps) # do recursion if called for else: dep_loop(candidate, deps) if do_search and (filename == target_file): found = 1 break if do_search and not found: print target_file + " was not found in " + target + " ..." sys.exit(1) else: # single file, just check it and exit # top level deps parent = target try: deps = deps_check(target) except NotELFError: print "not an ELF file..." sys.exit(1) # FIXME: for now, if no deps were found, assume the file # is statically linked. Should rework the recursion so # we don't need this. 
if not deps: deps.append("STATIC") if depth == 1: deps_print(parent, parent, target, 0, deps) # do recursion if called for else: dep_loop(parent, deps) sys.exit(0)
dep_loop(parent, deps)
for dep in deps: dep_loop(parent, None, dep, 1)
def main(): opt_parser = optparse.OptionParser(usage=usage_line, version="%prog version " + version, option_list=command_line_options) (options, args) = opt_parser.parse_args() if len(args) == 0 or len(args) > 2: opt_parser.error("improper number of non-option arguments") # prog_ndx_start is the offset in argv for the file/dir and recursion prog_ndx_start = 1 found = 0 parent = "" global do_csv, depth, do_static do_static = options.do_static do_csv = options.do_csv if options.target: do_search = True target = options.target target_file = args[0] if not os.path.isdir(target): print target + " does not appear to be a directory..." sys.exit(1) else: do_search = False target = args[0] if not(os.path.isdir(target) or os.path.isfile(target)): print target + " does not appear to be a directory or file..." sys.exit(1) # sanity check on recursion level if len(args) == 1: depth = 1 else: try: recursion_arg = args[1] depth = int(recursion_arg) except: bad_depth() if depth < 1: bad_depth() if os.path.isdir(target): # walk the directory tree and find ELF files to process for path, dirs, files in os.walk(target): for filename in files: if (do_search and (filename == target_file)) or not(do_search): candidate = os.path.join(path, filename) if os.path.isfile(candidate): try: deps = deps_check(candidate) except NotELFError: deps = [] if len(deps) > 0: if depth == 1: deps_print(parent, parent, candidate, 0, deps) # do recursion if called for else: dep_loop(candidate, deps) if do_search and (filename == target_file): found = 1 break if do_search and not found: print target_file + " was not found in " + target + " ..." sys.exit(1) else: # single file, just check it and exit # top level deps parent = target try: deps = deps_check(target) except NotELFError: print "not an ELF file..." sys.exit(1) # FIXME: for now, if no deps were found, assume the file # is statically linked. Should rework the recursion so # we don't need this. 
if not deps: deps.append("STATIC") if depth == 1: deps_print(parent, parent, target, 0, deps) # do recursion if called for else: dep_loop(parent, deps) sys.exit(0)
license = models.CharField('License', max_length=200, unique=True)
license = models.CharField('License', max_length=200) version = models.CharField('Version', max_length=20)
def __unicode__(self): return self.library
licenses = License.objects.all().order_by('license') choices = [] choices.append(('','')) for lic in licenses: choices.append((lic.license, lic.license))
def __init__(self, *args, **kwargs): super(PolicyForm, self).__init__(*args, **kwargs) self.fields['tlicense'].choices = license_choices() self.fields['dlicense'].choices = license_choices()
#def __unicode__(self):
tlicense.choices = choices dlicense.choices = choices
#def __unicode__(self):
else: resultsform = ResultsForm()
def results(request): if request.method == 'POST': # If the form has been submitted... testlist = request.POST.get('testlist', '') if testlist != '': tests = testlist.split(",") for test in tests: if test != '': q = Test.objects.filter(id = test) q.delete() q = File.objects.filter(test = test) q.delete() q = Lib.objects.filter(test = test) q.delete() else: resultsform = ResultsForm() latest_test_list = Test.objects.all().order_by('-test_date') return render_to_response('linkage/results.html', {'latest_test_list': latest_test_list})
if re.search("^Add License", mode):
if re.search("^Add", mode) and re.search("License", mode):
def licenses(request): errmsg = '' if request.method == 'POST': # If the form has been submitted... mode = urllib.unquote(request.POST.get('submit')) if re.search("^Add License", mode): licenseform = LicenseForm(request.POST) # A form bound to the POST data # request to add data if licenseform.is_valid(): # All validation rules pass licenseform.save() if re.search("^Add", mode) and re.search("Aliases", mode): aliasesform = AliasesForm(request.POST) # A form bound to the POST data # request to add data - we may have multiple aliases to add if aliasesform.is_valid(): # All validation rules pass license = aliasesform.cleaned_data['license'] errlist = [] for i in range(1,10): ainput = request.POST.get('alinput' + str(i), '') if ainput: aliasdata = Aliases(license = license, alias = ainput) try: aliasdata.save() except: errlist.append(str(ainput)) if errlist: errmsg = "<b>Warning:</b> failed to add duplicate aliases " + str(errlist) if re.search("^Delete Selected Licenses", mode): # delete request licenselist = request.POST.get('licenselist', '') if licenselist != '': delete_records(License, licenselist) if re.search("^Delete Selected Aliases", mode): # delete request aliaslist = request.POST.get('aliaslist', '') if aliaslist != '': # not by id here, so don't call delete_records records = aliaslist.split(",") for record in records: if record != '': q = Aliases.objects.filter(license = record) q.delete() licenseform = LicenseForm() # An unbound form aliasesform = AliasesForm() # An unbound form latest_license_list = License.objects.all().order_by('longname') # we represent this one differently in the gui, pre-arrange things here aliases_list = Aliases.objects.values('license').distinct() for l in aliases_list: alias_list = Aliases.objects.values('alias').filter(license = l['license']) aliases = '' for a in alias_list: aliases += a['alias'] + ' | ' # chomp the last "or" off l['alias'] = aliases[:-3] # we want multiple input boxes to enter a number of aliases per license, 
at once al_input = [] for i in range(1,10): al_input.append('<input type="text" size="6" name="alinput' + str(i) + '">') return render_to_response('linkage/licenses.html', { 'errmsg': errmsg, 'latest_license_list': latest_license_list, 'latest_aliases_list': aliases_list, 'licenseform': licenseform, 'aliasesform': aliasesform, 'input_list': al_input, 'tab_licenses': True })
TIME_ZONE = 'America/Chicago'
if 'TZ' not in os.environ: TIME_ZONE = 'Etc/GMT%+d' % (time.altzone / 3600) else: TIME_ZONE = os.environ['TZ']
def get_project_root(): project_root_paths = [ ".", "..", "/opt/linuxfoundation" ] for path in project_root_paths: if os.path.exists(os.path.join(path, "bin/readelf.py")): return path # Shouldn't get here unless we can't find the path. raise RuntimeError, "could not find the project path"
if issue: flicense = flag_policy_issue(flicense, status)
if issue: flicense = flag_policy_issue(flicense, 'D')
def check_policy(flicense, llicense, library, issue): # is the lib dynamic or static? ltype = 'Dynamic' if re.search(is_static, library): ltype = 'Static' # it's possible that the license assigned to the target or library is one of # the aliases, in which case we need the 'official' name for the policy check pllicense = llicense # we want to display both names in the report, if present pflicense = flicense llicenseset = Aliases.objects.filter(alias = llicense) if llicenseset: # can only be one match pllicense = llicenseset[0].license flicenseset = Aliases.objects.filter(alias = flicense) if flicenseset: # can only be one match pflicense = flicenseset[0].license policyset = Policy.objects.filter(tlicense = pflicense, dlicense = pllicense) policyset = policyset.filter(Q(relationship = ltype) | Q(relationship = 'Both')) # if we got multiple matches, just return - bad policies if policyset and policyset.count() < 2: status = policyset[0].status if status == 'D': issue = issue or True if llicense != pllicense: # plug in the alias (real name) llicense = llicense + ' (' + pllicense + ')' llicense = flag_policy_issue(llicense, status) if flicense != pflicense: flicense = flicense + ' (' + pflicense + ')' # only modify the target when there's a problem if issue: flicense = flag_policy_issue(flicense, status) return issue, llicense, flicense
deps = deps_check(candidate)
try: deps = deps_check(candidate) except NotELFError: deps = []
def main(argv):
    """Command-line driver (Python 2): walk a file or directory tree and
    report ELF dependencies.

    Recognized flags: -c (CSV output), -s NAME (search a directory for the
    file NAME), -h / -? (usage).  An optional trailing integer sets the
    recursion depth (default 1).  Exits 1 on bad input, 0 on success.
    """
    if len(argv) < 2 or '-?' in argv or '-h' in argv:
        show_usage(argv)
    # prog_ndx_start is the offset in argv for the file/dir and recursion
    prog_ndx_start = 1
    do_search = 0
    do_csv = 0
    found = 0
    parent = ""
    if '-c' in argv:
        sloc = string.index(argv, "-c")
        if prog_ndx_start <= sloc:
            prog_ndx_start = sloc + 1;
        do_csv = 1
    if '-s' in argv:
        sloc = string.index(argv, "-s")
        # -s takes the directory to search as its following argument.
        target = argv[sloc + 1]
        if prog_ndx_start <= sloc:
            prog_ndx_start = sloc + 2;
        do_search = 1
    # sanity check on file/directory name
    if not(do_search):
        target = argv[prog_ndx_start]
        if not(os.path.isdir(target) or os.path.isfile(target)):
            print target + " does not appear to be a directory or file..."
            sys.exit(1)
    else:
        if len(argv) < 4:
            show_usage(argv)
        # In search mode the positional argument is the file NAME to find.
        target_file = argv[prog_ndx_start]
        if not os.path.isdir(target):
            print target + " does not appear to be a directory..."
            sys.exit(1)
    # sanity check on recursion level
    if len(argv) == prog_ndx_start + 1:
        depth = 1
    else:
        try:
            recursion_arg = argv[prog_ndx_start + 1]
            depth = int(recursion_arg)
        except:
            bad_depth()
        if depth < 1:
            bad_depth()
    if os.path.isdir(target):
        # walk the directory tree and find ELF files to process
        for path, dirs, files in os.walk(target):
            for filename in files:
                if (do_search and (filename == target_file)) or not(do_search):
                    candidate = os.path.join(path, filename)
                    if os.path.isfile(candidate):
                        deps = deps_check(candidate)
                        if len(deps) > 0:
                            if depth == 1:
                                deps_print(parent, parent, candidate, 0, deps, do_csv, depth)
                            # do recursion if called for
                            else:
                                dep_loop(depth, candidate, deps, do_csv)
                    if do_search and (filename == target_file):
                        found = 1
                        break
        if do_search and not found:
            print target_file + " was not found in " + target + " ..."
            sys.exit(1)
    else:
        # single file, just check it and exit
        # top level deps
        parent = target
        try:
            deps = deps_check(target)
        except NotELFError:
            print "not an ELF file..."
            sys.exit(1)
        # FIXME: for now, if no deps were found, assume the file
        # is statically linked.  Should rework the recursion so
        # we don't need this.
        if not deps:
            deps.append("STATIC")
        if depth == 1:
            deps_print(parent, parent, target, 0, deps, do_csv, depth)
        # do recursion if called for
        else:
            dep_loop(depth, parent, deps, do_csv)
    sys.exit(0)
def start(run_browser, interface=None):
def start_server(run_browser, interface=None):
def start(run_browser, interface=None): if settings.USERDIR_ROOT: setup_userdir() childpid = os.fork() if childpid == 0: os.setsid() log_fn = os.path.join(settings.STATE_ROOT, "server.log") try: log_fd = os.open(log_fn, os.O_WRONLY | os.O_APPEND | os.O_CREAT) except OSError: log_fd = -1 if log_fd < 0: sys.stderr.write("Could not open log file; logging to stdout.\n") else: os.dup2(log_fd, 1) os.dup2(log_fd, 2) os.close(0) manager_args = ["dep-checker", "runserver"] if interface: manager_args.append(interface) execute_manager(settings, manager_args) else: pid_path = os.path.join(settings.STATE_ROOT, "server.pid") pid_file = open(pid_path, "w") pid_file.write(str(childpid)) pid_file.close() if run_browser: if interface: if interface.find(":") != -1: (ipaddr, port) = interface.split(":") if ipaddr == "0.0.0.0": interface = "127.0.0.1:" + port app_url = "http://%s/linkage" % interface else: app_url = "http://127.0.0.1:8000/linkage" sys.stdout.write("Waiting for the server to start...\n") time.sleep(10) sys.stdout.write("Starting a web browser.\n") os.execlp("xdg-open", "xdg-open", app_url) else: sys.exit(0)
def stop():
def stop_server():
def stop(): pid_path = os.path.join(settings.STATE_ROOT, "server.pid") if os.path.exists(pid_path): server_pid = int(open(pid_path).read()) sys.stdout.write("Killing process %d...\n" % server_pid) try: try: os.kill(server_pid, signal.SIGTERM) finally: os.unlink(pid_path) except OSError, e: sys.stderr.write("Could not kill process: %s\n" % str(e)) sys.exit(1) else: sys.stderr.write("No server process found to stop.\n") sys.exit(1)
found_lib = None
found_libs = []
def find_static_library(func): "Given a symbol, return the most likely static library it's from." found_lib = None dbpath = None for dp in database_search_path: if os.path.exists(os.path.join(dp, "staticdb.sqlite")): dbpath = dp break if dbpath: staticdb = sqlite3.connect(os.path.join(dbpath, "staticdb.sqlite")) cursor = staticdb.cursor() cursor.execute("SELECT library FROM static WHERE symbol=?", (func,)) results = cursor.fetchall() if len(results) == 1: found_lib = results[0][0] + " (static)" elif len(results) > 1: found_libs = [ x[0] for x in results ] found_lib = ",".join(found_libs) + " (static)" return found_lib
found_lib = results[0][0] + " (static)"
found_libs.append(results[0][0])
def find_static_library(func): "Given a symbol, return the most likely static library it's from." found_lib = None dbpath = None for dp in database_search_path: if os.path.exists(os.path.join(dp, "staticdb.sqlite")): dbpath = dp break if dbpath: staticdb = sqlite3.connect(os.path.join(dbpath, "staticdb.sqlite")) cursor = staticdb.cursor() cursor.execute("SELECT library FROM static WHERE symbol=?", (func,)) results = cursor.fetchall() if len(results) == 1: found_lib = results[0][0] + " (static)" elif len(results) > 1: found_libs = [ x[0] for x in results ] found_lib = ",".join(found_libs) + " (static)" return found_lib
found_lib = ",".join(found_libs) + " (static)" return found_lib
return found_libs
def find_static_library(func): "Given a symbol, return the most likely static library it's from." found_lib = None dbpath = None for dp in database_search_path: if os.path.exists(os.path.join(dp, "staticdb.sqlite")): dbpath = dp break if dbpath: staticdb = sqlite3.connect(os.path.join(dbpath, "staticdb.sqlite")) cursor = staticdb.cursor() cursor.execute("SELECT library FROM static WHERE symbol=?", (func,)) results = cursor.fetchall() if len(results) == 1: found_lib = results[0][0] + " (static)" elif len(results) > 1: found_libs = [ x[0] for x in results ] found_lib = ",".join(found_libs) + " (static)" return found_lib
lib = find_static_library(func) if lib and lib not in staticlib_list: staticlib_list.append(lib)
libs = find_static_library(func) for lib in libs: if lib not in staticlib_list: staticlib_list.append(lib)
def static_deps_check(target): "Look for statically linked dependencies." # State enumeration for debug parser. FIND_NEXT = 1 FIND_NAME = 2 # The algorithm here is pretty basic. We grab a complete symbol list # and debug information. Any symbols that aren't covered by debug # information are considered to be source from static libraries. # Read the functions from the symbol list. symlist = [ x.split() for x in os.popen("readelf -s " + target) ] symlist = [ x for x in symlist if len(x) == 8 ] sym_funcs = set([ x[7] for x in symlist if x[3] == "FUNC" ]) # Read the functions from the debug information. debuginfo = os.popen("readelf -wi " + target) debug_funcs = set() debugstate = FIND_NEXT for line in debuginfo: if len(line) < 2: continue if debugstate == FIND_NAME: if line[1] == "<": debugstate = FIND_NEXT else: match = re.match(r'\s+<.+>\s+(.+?)\s+:\s+\(.+\):\s+(.+)$', line) if match: (field, value) = match.group(1, 2) if field == "DW_AT_name": debug_funcs.add(value.strip()) debugstate = FIND_NEXT if debugstate == FIND_NEXT and line[1] == "<": match = re.search(r'\((.+)\)$', line) if match and match.group(1) == "DW_TAG_subprogram": found_name = None debugstate = FIND_NAME # Get the functions in the symbol list that have no debug info. staticsym_funcs = sym_funcs - debug_funcs # For each function, figure out where it came from. staticlib_list = [] for func in staticsym_funcs: lib = find_static_library(func) if lib and lib not in staticlib_list: staticlib_list.append(lib) # Return the list. staticlib_list.sort() return staticlib_list
return staticlib_list
staticlib_results = [ x + " (static)" for x in staticlib_list ] return staticlib_results
def static_deps_check(target): "Look for statically linked dependencies." # State enumeration for debug parser. FIND_NEXT = 1 FIND_NAME = 2 # The algorithm here is pretty basic. We grab a complete symbol list # and debug information. Any symbols that aren't covered by debug # information are considered to be source from static libraries. # Read the functions from the symbol list. symlist = [ x.split() for x in os.popen("readelf -s " + target) ] symlist = [ x for x in symlist if len(x) == 8 ] sym_funcs = set([ x[7] for x in symlist if x[3] == "FUNC" ]) # Read the functions from the debug information. debuginfo = os.popen("readelf -wi " + target) debug_funcs = set() debugstate = FIND_NEXT for line in debuginfo: if len(line) < 2: continue if debugstate == FIND_NAME: if line[1] == "<": debugstate = FIND_NEXT else: match = re.match(r'\s+<.+>\s+(.+?)\s+:\s+\(.+\):\s+(.+)$', line) if match: (field, value) = match.group(1, 2) if field == "DW_AT_name": debug_funcs.add(value.strip()) debugstate = FIND_NEXT if debugstate == FIND_NEXT and line[1] == "<": match = re.search(r'\((.+)\)$', line) if match and match.group(1) == "DW_TAG_subprogram": found_name = None debugstate = FIND_NAME # Get the functions in the symbol list that have no debug info. staticsym_funcs = sym_funcs - debug_funcs # For each function, figure out where it came from. staticlib_list = [] for func in staticsym_funcs: lib = find_static_library(func) if lib and lib not in staticlib_list: staticlib_list.append(lib) # Return the list. staticlib_list.sort() return staticlib_list
help="don't set cfg permissions")
help="don't set file permissions")
def arguments():
    """Defines the command line arguments for the script.

    Builds and returns the ArgumentParser with three subcommands:
    cron (no args, uses in-script parameters), cfg (reads a config file)
    and shell (everything on the command line).  Each subparser defines
    hidden 'cron'/'cfg' booleans (help/default=SUPPRESS) so the chosen
    mode can be detected from the parsed namespace.

    Bug fix: the --no-perms / --no-umask help texts said "cfg" where
    "file" was meant (a rename had clobbered the help strings).
    """
    # --- long usage/description texts ---
    main_desc = ("Mirror a remote FTP directory into a local directory or vice "
                 "versa through the lftp program")
    subs_desc = "Select a running mode from the following:"
    epilog = ("For detailed help for each mode, select a mode followed by help "
              "option, e.g.:{0}{0}%(prog)s shell -h").format(os.linesep)
    cron_use = "%(prog)s [-h]"
    shell_use = ("%(prog)s site remote local [options]{0}{0}By default "
                 "downloads the changes from remote FTP directory to local "
                 "directory.{0}To upload changes from local to remote FTP, use "
                 "the 'r, --reverse' option").format(os.linesep)
    file_use = ("%(prog)s config_file [-h]{0}{0}The structure of the "
                "config file (a simple text file) is as follows:{0}{0}[section]"
                "{0}site = {{ftp server URL or IP}}{0}port = (ftp server port)"
                "{0}remote = {{remote directory}}{0}local = {{local directory}}"
                "{0}user = (ftp server username){0}password = (user password "
                "encoded in base64){0}options = (other options){0}{0}Section is"
                " a name that defines the mirror operation. Usually is the ftp "
                "server's name or directory' name. Useful for distinguish one "
                "mirror operation from others. Write one section for each "
                "mirror action with no limits in the number of sections.{0}{0}"
                "Values between curly brackets '{{}}' are required arguments "
                "and values between brackets '()' are optional arguments. If "
                "don't want optional arguments, left them blank. In case you do"
                " not specify a username and password, you must add the '-a' "
                "option which specifies that the connection is made with the "
                "anonymous user.{0}{0}The order of arguments doesn't matter, "
                "but all are needed.{0}{0}").format(os.linesep)
    parser = ArgumentParser(description=main_desc, epilog=epilog)
    subparsers = parser.add_subparsers(title="running modes",
                                       description=subs_desc)
    # --- cron mode ---
    cron = subparsers.add_parser("cron", help="ideal to run as a scheduled task"
                                 ". Takes arguments from parameters defined "
                                 "within the script", usage=cron_use)
    cron.add_argument("cron", action="store_true", help=SUPPRESS,
                      default=SUPPRESS)
    cron.add_argument("cfg", action="store_false", help=SUPPRESS,
                      default=SUPPRESS)
    # --- cfg (config file) mode ---
    cfg = subparsers.add_parser("cfg", help="ideal for mirror multiple sites/"
                                "directories. Imports the arguments from a "
                                "config file", usage=file_use)
    cfg.add_argument("cron", action="store_false", help=SUPPRESS,
                     default=SUPPRESS)
    cfg.add_argument("cfg", action="store_true", help=SUPPRESS,
                     default=SUPPRESS)
    cfg.add_argument("config_file", help="config file to import arguments")
    # --- shell (command line) mode ---
    shell = subparsers.add_parser("shell", help="usual mode, takes arguments "
                                  "from the command line ", usage=shell_use)
    shell.add_argument("cron", action="store_false", help=SUPPRESS,
                       default=SUPPRESS)
    shell.add_argument("cfg", action="store_false", help=SUPPRESS,
                       default=SUPPRESS)
    shell.add_argument("site", help="the ftp server (URL or IP)")
    shell.add_argument("remote", help="the remote directory")
    shell.add_argument("local", help="the local directory")
    # Exactly one of -l / -a must be given.
    auth = shell.add_mutually_exclusive_group(required=True)
    auth.add_argument("-l", "--login", dest="login", nargs=2,
                      help="the ftp account's username and password",
                      metavar=("user", "password"))
    auth.add_argument("-a", "--anon", action="store_true", dest="anonymous",
                      help="set user as anonymous", default=False)
    # Short options map to single-letter lftp mirror flags; the long
    # "--xxx" options store the literal " --xxx" lftp argument text.
    shell.add_argument("-p", "--port", dest="port", default="",
                       help="the ftp server port", metavar="port")
    shell.add_argument("-s", "--secure", action="store_const", const="s",
                       dest="secure", default="",
                       help="use the sftp protocol instead of ftp")
    shell.add_argument("-e", "--erase", action="store_const", const="e",
                       dest="erase", default="",
                       help="delete files not present at target site")
    shell.add_argument("-n", "--newer", action="store_const", const="n",
                       dest="newer", default="",
                       help="download only newer files")
    shell.add_argument("-P", "--parallel", action="store_const", const="P",
                       dest="parallel", default="",
                       help="download files in parallel")
    shell.add_argument("-r", "--reverse", action="store_const", const="R",
                       dest="reverse", default="",
                       help="reverse, upload files from local to remote")
    shell.add_argument("--delete-first", action="store_const",
                       const=" --delete-first", dest="del_first", default="",
                       help="delete old files before transferring new ones")
    shell.add_argument("--depth-first", action="store_const",
                       const=" --depth-first", dest="depth_first", default="",
                       help="descend into subdirectories, before transfer files")
    shell.add_argument("--no-empty-dirs", action="store_const",
                       const=" --no-empty-dirs", dest="no_empty_dir",
                       default="",
                       help="don't create empty dirs (needs --depth-first)")
    shell.add_argument("--no-recursion", action="store_const",
                       const=" --no-recursion", dest="no_recursion",
                       default="", help="don't go to subdirectories")
    shell.add_argument("--dry-run", action="store_const", const=" --dry-run",
                       dest="dry_run", default="",
                       help="simulation, don't execute anything. "
                       "Writes to log")
    shell.add_argument("--use-cache", action="store_const",
                       const=" --use-cache", dest="use_cache", default="",
                       help="use cached directory listings")
    shell.add_argument("--del-source", action="store_const",
                       const=" --Remove-source-files", dest="del_source",
                       default="",
                       help="remove files (no dirs) after transfer (Caution!)")
    shell.add_argument("--only-missing", action="store_const",
                       const=" --only-missing", dest="missing", default="",
                       help="download only missing files")
    shell.add_argument("--only-existing", action="store_const",
                       const=" --only-existing", dest="existing", default="",
                       help="download only files already existing at target")
    shell.add_argument("--loop", action="store_const", const=" --loop",
                       dest="loop", default="",
                       help="Loop until no changes found")
    shell.add_argument("--ignore-size", action="store_const",
                       const=" --ignore-size", dest="size", default="",
                       help="ignore size when deciding whether to download")
    shell.add_argument("--ignore-time", action="store_const",
                       const=" --ignore-time", dest="time", default="",
                       help="ignore time when deciding whether to download")
    shell.add_argument("--no-perms", action="store_const", const=" --no-perms",
                       dest="no_perms", default="",
                       help="don't set file permissions")
    shell.add_argument("--no-umask", action="store_const", const=" --no-umask",
                       dest="no_umask", default="",
                       help="don't apply umask to file modes")
    shell.add_argument("--no-symlinks", action="store_const",
                       const=" --no-symlinks", dest="no_symlinks", default="",
                       help="don't create symbolic links")
    shell.add_argument("--allow-suid", action="store_const",
                       const=" --allow-suid", dest="suid", default="",
                       help="set suid/sgid bits according to remote site")
    shell.add_argument("--allow-chown", action="store_const",
                       const=" --allow-chown", dest="chown", default="",
                       help="try to set owner and group on files")
    shell.add_argument("--dereference", action="store_const",
                       const=" --dereference", dest="dereference", default="",
                       help="download symbolic links as files")
    shell.add_argument("--exclude-glob", dest="exc_glob", default="",
                       metavar="GP",
                       help="exclude matching files. GP is a glob pattern, e.g."
                       " '*.zip'")
    shell.add_argument("--include-glob", dest="inc_glob", default="",
                       metavar="GP",
                       help="include matching files. GP is a glob pattern, e.g."
                       " '*.zip'")
    shell.add_argument("-q", "--quiet", action="store_true", dest="quiet",
                       help="the detailed shell process is no "
                       "displayed, but is added to the log", default=False)
    shell.add_argument("--no-compress", action="store_true",
                       dest="no_compress", help="don't create daily archive "
                       "files", default=False)
    shell.add_argument("--no-email", action="store_true", dest="no_email",
                       help="no sends email with the log", default=False)
    shell.add_argument("--smtp_server", dest="smtp_server",
                       default="localhost", metavar="server",
                       help="set a smtp server")
    shell.add_argument("--smtp_user", dest="smtp_user", default="",
                       metavar="user", help="the smtp server username")
    shell.add_argument("--smtp_pass", dest="smtp_pass", default="",
                       metavar="password", help="the smtp server password")
    shell.add_argument("--from_addr", dest="from_addr", default="",
                       metavar="email", help="sender's email address")
    shell.add_argument("--to_addrs", dest="to_addrs", default="", nargs='+',
                       metavar="email",
                       help="a list of receiver(s)' email address(es)")
    parser.add_argument("-v", "--version", action="version",
                        version="%(prog)s {0}".format(__version__),
                        help="show program's version number and exit")
    return parser
help="don't apply umask to cfg modes")
help="don't apply umask to file modes")
def arguments():
    """Defines the command line arguments for the script.

    Builds and returns the ArgumentParser with three subcommands:
    cron (no args, uses in-script parameters), cfg (reads a config file)
    and shell (everything on the command line).  Each subparser defines
    hidden 'cron'/'cfg' booleans (help/default=SUPPRESS) so the chosen
    mode can be detected from the parsed namespace.
    """
    # --- long usage/description texts ---
    main_desc = ("Mirror a remote FTP directory into a local directory or vice "
                 "versa through the lftp program")
    subs_desc = "Select a running mode from the following:"
    epilog = ("For detailed help for each mode, select a mode followed by help "
              "option, e.g.:{0}{0}%(prog)s shell -h").format(os.linesep)
    cron_use = "%(prog)s [-h]"
    shell_use = ("%(prog)s site remote local [options]{0}{0}By default "
                 "downloads the changes from remote FTP directory to local "
                 "directory.{0}To upload changes from local to remote FTP, use "
                 "the 'r, --reverse' option").format(os.linesep)
    file_use = ("%(prog)s config_file [-h]{0}{0}The structure of the "
                "config file (a simple text file) is as follows:{0}{0}[section]"
                "{0}site = {{ftp server URL or IP}}{0}port = (ftp server port)"
                "{0}remote = {{remote directory}}{0}local = {{local directory}}"
                "{0}user = (ftp server username){0}password = (user password "
                "encoded in base64){0}options = (other options){0}{0}Section is"
                " a name that defines the mirror operation. Usually is the ftp "
                "server's name or directory' name. Useful for distinguish one "
                "mirror operation from others. Write one section for each "
                "mirror action with no limits in the number of sections.{0}{0}"
                "Values between curly brackets '{{}}' are required arguments "
                "and values between brackets '()' are optional arguments. If "
                "don't want optional arguments, left them blank. In case you do"
                " not specify a username and password, you must add the '-a' "
                "option which specifies that the connection is made with the "
                "anonymous user.{0}{0}The order of arguments doesn't matter, "
                "but all are needed.{0}{0}").format(os.linesep)
    parser = ArgumentParser(description=main_desc, epilog=epilog)
    subparsers = parser.add_subparsers(title="running modes",
                                       description=subs_desc)
    # --- cron mode ---
    cron = subparsers.add_parser("cron", help="ideal to run as a scheduled task"
                                 ". Takes arguments from parameters defined "
                                 "within the script", usage=cron_use)
    cron.add_argument("cron", action="store_true", help=SUPPRESS,
                      default=SUPPRESS)
    cron.add_argument("cfg", action="store_false", help=SUPPRESS,
                      default=SUPPRESS)
    # --- cfg (config file) mode ---
    cfg = subparsers.add_parser("cfg", help="ideal for mirror multiple sites/"
                                "directories. Imports the arguments from a "
                                "config file", usage=file_use)
    cfg.add_argument("cron", action="store_false", help=SUPPRESS,
                     default=SUPPRESS)
    cfg.add_argument("cfg", action="store_true", help=SUPPRESS,
                     default=SUPPRESS)
    cfg.add_argument("config_file", help="config file to import arguments")
    # --- shell (command line) mode ---
    shell = subparsers.add_parser("shell", help="usual mode, takes arguments "
                                  "from the command line ", usage=shell_use)
    shell.add_argument("cron", action="store_false", help=SUPPRESS,
                       default=SUPPRESS)
    shell.add_argument("cfg", action="store_false", help=SUPPRESS,
                       default=SUPPRESS)
    shell.add_argument("site", help="the ftp server (URL or IP)")
    shell.add_argument("remote", help="the remote directory")
    shell.add_argument("local", help="the local directory")
    # Exactly one of -l / -a must be given.
    auth = shell.add_mutually_exclusive_group(required=True)
    auth.add_argument("-l", "--login", dest="login", nargs=2,
                      help="the ftp account's username and password",
                      metavar=("user", "password"))
    auth.add_argument("-a", "--anon", action="store_true", dest="anonymous",
                      help="set user as anonymous", default=False)
    # Short options map to single-letter lftp mirror flags; the long
    # "--xxx" options store the literal " --xxx" lftp argument text.
    shell.add_argument("-p", "--port", dest="port", default="",
                       help="the ftp server port", metavar="port")
    shell.add_argument("-s", "--secure", action="store_const", const="s",
                       dest="secure", default="",
                       help="use the sftp protocol instead of ftp")
    shell.add_argument("-e", "--erase", action="store_const", const="e",
                       dest="erase", default="",
                       help="delete files not present at target site")
    shell.add_argument("-n", "--newer", action="store_const", const="n",
                       dest="newer", default="",
                       help="download only newer files")
    shell.add_argument("-P", "--parallel", action="store_const", const="P",
                       dest="parallel", default="",
                       help="download files in parallel")
    shell.add_argument("-r", "--reverse", action="store_const", const="R",
                       dest="reverse", default="",
                       help="reverse, upload files from local to remote")
    shell.add_argument("--delete-first", action="store_const",
                       const=" --delete-first", dest="del_first", default="",
                       help="delete old files before transferring new ones")
    shell.add_argument("--depth-first", action="store_const",
                       const=" --depth-first", dest="depth_first", default="",
                       help="descend into subdirectories, before transfer files")
    shell.add_argument("--no-empty-dirs", action="store_const",
                       const=" --no-empty-dirs", dest="no_empty_dir",
                       default="",
                       help="don't create empty dirs (needs --depth-first)")
    shell.add_argument("--no-recursion", action="store_const",
                       const=" --no-recursion", dest="no_recursion",
                       default="", help="don't go to subdirectories")
    shell.add_argument("--dry-run", action="store_const", const=" --dry-run",
                       dest="dry_run", default="",
                       help="simulation, don't execute anything. "
                       "Writes to log")
    shell.add_argument("--use-cache", action="store_const",
                       const=" --use-cache", dest="use_cache", default="",
                       help="use cached directory listings")
    shell.add_argument("--del-source", action="store_const",
                       const=" --Remove-source-files", dest="del_source",
                       default="",
                       help="remove files (no dirs) after transfer (Caution!)")
    shell.add_argument("--only-missing", action="store_const",
                       const=" --only-missing", dest="missing", default="",
                       help="download only missing files")
    shell.add_argument("--only-existing", action="store_const",
                       const=" --only-existing", dest="existing", default="",
                       help="download only files already existing at target")
    shell.add_argument("--loop", action="store_const", const=" --loop",
                       dest="loop", default="",
                       help="Loop until no changes found")
    shell.add_argument("--ignore-size", action="store_const",
                       const=" --ignore-size", dest="size", default="",
                       help="ignore size when deciding whether to download")
    shell.add_argument("--ignore-time", action="store_const",
                       const=" --ignore-time", dest="time", default="",
                       help="ignore time when deciding whether to download")
    # NOTE(review): the next two help texts say "cfg" but appear to mean
    # "file" (looks like a rename clobbered the help strings) — confirm.
    shell.add_argument("--no-perms", action="store_const", const=" --no-perms",
                       dest="no_perms", default="",
                       help="don't set cfg permissions")
    shell.add_argument("--no-umask", action="store_const", const=" --no-umask",
                       dest="no_umask", default="",
                       help="don't apply umask to cfg modes")
    shell.add_argument("--no-symlinks", action="store_const",
                       const=" --no-symlinks", dest="no_symlinks", default="",
                       help="don't create symbolic links")
    shell.add_argument("--allow-suid", action="store_const",
                       const=" --allow-suid", dest="suid", default="",
                       help="set suid/sgid bits according to remote site")
    shell.add_argument("--allow-chown", action="store_const",
                       const=" --allow-chown", dest="chown", default="",
                       help="try to set owner and group on files")
    shell.add_argument("--dereference", action="store_const",
                       const=" --dereference", dest="dereference", default="",
                       help="download symbolic links as files")
    shell.add_argument("--exclude-glob", dest="exc_glob", default="",
                       metavar="GP",
                       help="exclude matching files. GP is a glob pattern, e.g."
                       " '*.zip'")
    shell.add_argument("--include-glob", dest="inc_glob", default="",
                       metavar="GP",
                       help="include matching files. GP is a glob pattern, e.g."
                       " '*.zip'")
    shell.add_argument("-q", "--quiet", action="store_true", dest="quiet",
                       help="the detailed shell process is no "
                       "displayed, but is added to the log", default=False)
    shell.add_argument("--no-compress", action="store_true",
                       dest="no_compress", help="don't create daily archive "
                       "files", default=False)
    shell.add_argument("--no-email", action="store_true", dest="no_email",
                       help="no sends email with the log", default=False)
    shell.add_argument("--smtp_server", dest="smtp_server",
                       default="localhost", metavar="server",
                       help="set a smtp server")
    shell.add_argument("--smtp_user", dest="smtp_user", default="",
                       metavar="user", help="the smtp server username")
    shell.add_argument("--smtp_pass", dest="smtp_pass", default="",
                       metavar="password", help="the smtp server password")
    shell.add_argument("--from_addr", dest="from_addr", default="",
                       metavar="email", help="sender's email address")
    shell.add_argument("--to_addrs", dest="to_addrs", default="", nargs='+',
                       metavar="email",
                       help="a list of receiver(s)' email address(es)")
    parser.add_argument("-v", "--version", action="version",
                        version="%(prog)s {0}".format(__version__),
                        help="show program's version number and exit")
    return parser
def bes_unit_size(f_size): """Get a size in bytes and convert it for the best unit for readability. Return two values: (int) bu_size -- Size of the path converted to the best unit for easy read (str) unit -- The units (IEC) for bu_size (from bytes(2^0) to YiB(2^80))
def best_unit_size(bytes_size): """Get a size in bytes & convert it to the best IEC prefix for readability. Return a dictionary with three pair of keys/values: 's' -- (float) Size of path converted to the best unit for easy read 'u' -- (str) The prefix (IEC) for s (from bytes(2^0) to YiB(2^80)) 'b' -- (int / long) The original size in bytes
def bes_unit_size(f_size): """Get a size in bytes and convert it for the best unit for readability. Return two values: (int) bu_size -- Size of the path converted to the best unit for easy read (str) unit -- The units (IEC) for bu_size (from bytes(2^0) to YiB(2^80)) """ for exp in range(0, 90 , 10): bu_size = f_size / pow(2.0, exp) if int(bu_size) < 2 ** 10: unit = {0:'bytes', 10:'KiB', 20:'MiB', 30:'GiB', 40:'TiB', 50:'PiB', 60:'EiB', 70:'ZiB', 80:'YiB'}[exp] break return {'s':bu_size, 'u':unit}
bu_size = f_size / pow(2.0, exp)
bu_size = abs(bytes_size) / pow(2.0, exp)
def bes_unit_size(f_size): """Get a size in bytes and convert it for the best unit for readability. Return two values: (int) bu_size -- Size of the path converted to the best unit for easy read (str) unit -- The units (IEC) for bu_size (from bytes(2^0) to YiB(2^80)) """ for exp in range(0, 90 , 10): bu_size = f_size / pow(2.0, exp) if int(bu_size) < 2 ** 10: unit = {0:'bytes', 10:'KiB', 20:'MiB', 30:'GiB', 40:'TiB', 50:'PiB', 60:'EiB', 70:'ZiB', 80:'YiB'}[exp] break return {'s':bu_size, 'u':unit}
return {'s':bu_size, 'u':unit}
return {'s':bu_size, 'u':unit, 'b':bytes_size}
def bes_unit_size(f_size): """Get a size in bytes and convert it for the best unit for readability. Return two values: (int) bu_size -- Size of the path converted to the best unit for easy read (str) unit -- The units (IEC) for bu_size (from bytes(2^0) to YiB(2^80)) """ for exp in range(0, 90 , 10): bu_size = f_size / pow(2.0, exp) if int(bu_size) < 2 ** 10: unit = {0:'bytes', 10:'KiB', 20:'MiB', 30:'GiB', 40:'TiB', 50:'PiB', 60:'EiB', 70:'ZiB', 80:'YiB'}[exp] break return {'s':bu_size, 'u':unit}
if os.path.isfile(the_path): path_size = os.path.getsize(the_path) for path, dirs, files in os.walk(the_path): for fil in files: filename = os.path.join(path, fil) path_size += os.path.getsize(filename)
for path, directories, files in os.walk(the_path): for filename in files: path_size += os.lstat(os.path.join(path, filename)).st_size for directory in directories: path_size += os.lstat(os.path.join(path, directory)).st_size path_size += os.path.getsize(the_path)
def get_size(the_path): """Get size of a directory tree or a file in bytes.""" path_size = 0 if os.path.isfile(the_path): path_size = os.path.getsize(the_path) for path, dirs, files in os.walk(the_path): for fil in files: filename = os.path.join(path, fil) path_size += os.path.getsize(filename) return path_size
log_size = get_size(log.filename)
log_size = get_size(log.filename) if os.path.exists(log.filename) else 0
def mirror(args, log): """Mirror the directories.""" user = '' if args.anonymous else ' '.join(args.login) local, remote = os.path.normpath(args.local), os.path.normpath(args.remote) port = '-p {0}'.format(args.port) if args.port else '' include = ' --include-glob {0}'.format(args.inc_glob) if args.inc_glob else '' exclude = ' --exclude-glob {0}'.format(args.exc_glob) if args.exc_glob else '' url = 'http://code.joedicastro.com/lftp-mirror' msg = 'Connected to {1} as {2}{0}'.format(os.linesep, args.site, 'anonymous' if args.anonymous else args.login[0]) msg += 'Mirror {0} to {1}'.format(local if args.reverse else remote, remote if args.reverse else local) log.header(url, msg) log.time('Start time') notify('Mirroring with {0}...'.format(args.site), 'sync') if not os.path.exists(local): os.mkdir(local) log.list('Created new directory', local) os.chdir(os.path.join(local, os.pardir)) # create the script file to import with lftp scp_args = ('-vvv' + args.erase + args.newer + args.parallel + args.reverse + args.del_first + args.depth_first + args.no_empty_dir + args.no_recursion + args.dry_run + args.use_cache + args.del_source + args.missing + args.existing + args.loop + args.size + args.time + args.no_perms + args.no_umask + args.no_symlinks + args.suid + args.chown + args.dereference + exclude + include) with open('ftpscript', 'w') as script: lines = ('open {0}ftp://{1} {2}'.format(args.secure, args.site, port), 'user {0}'.format(user), 'mirror {0} {1} {2}'.format(scp_args, local if args.reverse else remote, remote if args.reverse else local), 'exit') script.write(os.linesep.join(lines)) # mirror cmd = ['lftp', '-d', '-f', script.name] sync = Popen(cmd, stdout=PIPE, stderr={True:STDOUT, False:None}[args.quiet]) # end mirroring log.list('lftp output', ''.join(sync.stdout.readlines())) # compress the dir and create a .gz file with date if not args.reverse and not args.no_compress: notify('Compressing folder...', 'info') log.list('Rotate compressed copies', 
compress(local)) # end compress gz_size = sum([get_size(gz) for gz in glob.glob('{0}*.gz'.format(local))]) log_size = get_size(log.filename) local_size = get_size(local) size = bes_unit_size(local_size + gz_size + log_size) log.block('Disk space used', '{0:>76.2f} {1}'.format(size['s'], size['u'])) log.time('End Time') log.free(os.linesep * 2) log.write(True) os.remove(script.name)
size = bes_unit_size(local_size + gz_size + log_size)
size = best_unit_size(local_size + gz_size + log_size)
def mirror(args, log): """Mirror the directories.""" user = '' if args.anonymous else ' '.join(args.login) local, remote = os.path.normpath(args.local), os.path.normpath(args.remote) port = '-p {0}'.format(args.port) if args.port else '' include = ' --include-glob {0}'.format(args.inc_glob) if args.inc_glob else '' exclude = ' --exclude-glob {0}'.format(args.exc_glob) if args.exc_glob else '' url = 'http://code.joedicastro.com/lftp-mirror' msg = 'Connected to {1} as {2}{0}'.format(os.linesep, args.site, 'anonymous' if args.anonymous else args.login[0]) msg += 'Mirror {0} to {1}'.format(local if args.reverse else remote, remote if args.reverse else local) log.header(url, msg) log.time('Start time') notify('Mirroring with {0}...'.format(args.site), 'sync') if not os.path.exists(local): os.mkdir(local) log.list('Created new directory', local) os.chdir(os.path.join(local, os.pardir)) # create the script file to import with lftp scp_args = ('-vvv' + args.erase + args.newer + args.parallel + args.reverse + args.del_first + args.depth_first + args.no_empty_dir + args.no_recursion + args.dry_run + args.use_cache + args.del_source + args.missing + args.existing + args.loop + args.size + args.time + args.no_perms + args.no_umask + args.no_symlinks + args.suid + args.chown + args.dereference + exclude + include) with open('ftpscript', 'w') as script: lines = ('open {0}ftp://{1} {2}'.format(args.secure, args.site, port), 'user {0}'.format(user), 'mirror {0} {1} {2}'.format(scp_args, local if args.reverse else remote, remote if args.reverse else local), 'exit') script.write(os.linesep.join(lines)) # mirror cmd = ['lftp', '-d', '-f', script.name] sync = Popen(cmd, stdout=PIPE, stderr={True:STDOUT, False:None}[args.quiet]) # end mirroring log.list('lftp output', ''.join(sync.stdout.readlines())) # compress the dir and create a .gz file with date if not args.reverse and not args.no_compress: notify('Compressing folder...', 'info') log.list('Rotate compressed copies', 
compress(local)) # end compress gz_size = sum([get_size(gz) for gz in glob.glob('{0}*.gz'.format(local))]) log_size = get_size(log.filename) local_size = get_size(local) size = bes_unit_size(local_size + gz_size + log_size) log.block('Disk space used', '{0:>76.2f} {1}'.format(size['s'], size['u'])) log.time('End Time') log.free(os.linesep * 2) log.write(True) os.remove(script.name)