diff --git a/.gitignore b/.gitignore
index d3ba2f5..0dfdfda 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,5 +2,6 @@ build/
 dist/
 *~
 *.pyc
-tests/.papers
+tests/paper_test
 papers.egg-info
+.DS_Store
diff --git a/papers/color.py b/papers/color.py
index 794d028..ada8dec 100644
--- a/papers/color.py
+++ b/papers/color.py
@@ -1,7 +1,7 @@
 """
 Small code to handle colored text
 """
-
+import re
 
 bold = '\033[1m'
 end = '\033[0m'
@@ -37,3 +37,10 @@ def setup(enable = True):
         dye = _dye
     else:
         dye = _nodye
+
+
+undye_re = re.compile('\x1b\[[;\d]*[A-Za-z]')
+
+def undye(s):
+    """Purge string s of color"""
+    return undye_re.sub('', s)
\ No newline at end of file
diff --git a/papers/commands/remove_cmd.py b/papers/commands/remove_cmd.py
index e1dfea4..00147f6 100644
--- a/papers/commands/remove_cmd.py
+++ b/papers/commands/remove_cmd.py
@@ -8,18 +8,21 @@ from ..events import RemoveEvent
 
 def parser(subparsers, config):
     parser = subparsers.add_parser('remove', help='removes a paper')
+    parser.add_argument('-f', '--force', action='store_true', default=None,
+                        help="does not prompt for confirmation.")
     add_references_argument(parser)
     return parser
 
 
-def command(config, ui, references):
+def command(config, ui, force, references):
     rp = repo.Repository.from_directory(config)
     citekeys = parse_references(ui, rp, references)
-    are_you_sure = ("Are you sure you want to delete paper(s) [%s]"
-                    " (this will also delete associated documents)?"
-                    % ', '.join([color.dye(c, color.citekey) for c in citekeys]))
-    sure = ui.input_yn(question=are_you_sure, default='n')
-    if sure:
+    if force is None:
+        are_you_sure = ("Are you sure you want to delete paper(s) [%s]"
+                        " (this will also delete associated documents)?"
+                        % ', '.join([color.dye(c, color.citekey) for c in citekeys]))
+        sure = ui.input_yn(question=are_you_sure, default='n')
+    if force or sure:
         for c in citekeys:
             rmevent = RemoveEvent(config, ui, c)
             rmevent.send()
diff --git a/papers/configs.py b/papers/configs.py
index e137456..ae1ed2d 100644
--- a/papers/configs.py
+++ b/papers/configs.py
@@ -14,7 +14,7 @@ except KeyError:
 DEFAULT_IMPORT_COPY = 'yes'
 DEFAULT_IMPORT_MOVE = 'no'
 DEFAULT_COLOR = 'yes'
-DEFAULT_PLUGINS = ''
+DEFAULT_PLUGINS = 'texnote'
 
 CONFIG = ConfigParser.SafeConfigParser({
     'papers-directory': DEFAULT_PAPERS_DIRECTORY,
diff --git a/papers/p3.py b/papers/p3.py
new file mode 100644
index 0000000..d4b8702
--- /dev/null
+++ b/papers/p3.py
@@ -0,0 +1,9 @@
+import sys
+
+if sys.version_info[0] == 2:
+    import ConfigParser as configparser
+    import StringIO as io
+    input = raw_input
+else:
+    import configparser
+    import io
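
The new papers/p3.py module pins the Python 2/3 naming differences in one place, so the rest of the code base can import a single set of names instead of branching on sys.version_info itself. A minimal usage sketch, not part of the patch (the calling code is illustrative; the imported names are the ones the module defines):

    from papers.p3 import configparser, io

    config_parser = configparser.SafeConfigParser()  # ConfigParser.SafeConfigParser on Python 2
    buf = io.StringIO()                              # StringIO.StringIO on Python 2, io.StringIO on Python 3
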
diff --git a/papers/papers b/papers/papers
index 89c78de..1cc68db 100755
--- a/papers/papers
+++ b/papers/papers
@@ -1,50 +1,5 @@
 #!/usr/bin/env python2
 # -*- coding:utf-8 -*-
-
-import argparse
-import collections
-
-from papers.ui import UI
-from papers import configs
-from papers import commands
-from papers import plugin
-
-cmds = collections.OrderedDict([
-        ('init', commands.init_cmd),
-        ('add', commands.add_cmd),
-        ('add_library', commands.add_library_cmd),
-        ('import', commands.import_cmd),
-        ('export', commands.export_cmd),
-        ('list', commands.list_cmd),
-        ('edit', commands.edit_cmd),
-        ('remove', commands.remove_cmd),
-        ('open', commands.open_cmd),
-        ('websearch', commands.websearch_cmd),
-        ('tag', commands.tag_cmd),
-        ('attach', commands.attach_cmd),
-        ('update', commands.update_cmd),
-        ])
-
-config = configs.read_config()
-ui = UI(config)
-
-# Extend with plugin commands
-plugin.load_plugins(config, ui, configs.get_plugins(config))
-
-for p in plugin.get_plugins().values():
-    cmds.update(collections.OrderedDict([(p.name, p)]))
-#
-
-parser = argparse.ArgumentParser(description="research papers repository")
-subparsers = parser.add_subparsers(title="valid commands", dest="command")
-
-for cmd_mod in cmds.values():
-    subparser = cmd_mod.parser(subparsers, config) # why do we return the subparser ?
-
-args = parser.parse_args()
-args.config = config
-args.ui = ui
-cmd = args.command
-del args.command
-
-cmds[cmd].command(**vars(args))
+from papers import papers_cmd
+papers_cmd.execute()
\ No newline at end of file
diff --git a/papers/papers_cmd.py b/papers/papers_cmd.py
new file mode 100644
index 0000000..eb59bff
--- /dev/null
+++ b/papers/papers_cmd.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python2
+# -*- coding:utf-8 -*-
+import sys
+
+import argparse
+import collections
+
+from .ui import UI
+from . import configs
+from . import commands
+from . import plugin
+
+cmds = collections.OrderedDict([
+        ('init', commands.init_cmd),
+        ('add', commands.add_cmd),
+        ('add_library', commands.add_library_cmd),
+        ('import', commands.import_cmd),
+        ('export', commands.export_cmd),
+        ('list', commands.list_cmd),
+        ('edit', commands.edit_cmd),
+        ('remove', commands.remove_cmd),
+        ('open', commands.open_cmd),
+        ('websearch', commands.websearch_cmd),
+        ('tag', commands.tag_cmd),
+        ('attach', commands.attach_cmd),
+        ('update', commands.update_cmd),
+        ])
+
+
+def execute(raw_args = sys.argv):
+    config = configs.read_config()
+    ui = UI(config)
+
+    # Extend with plugin commands
+    plugin.load_plugins(config, ui, configs.get_plugins(config))
+    for p in plugin.get_plugins().values():
+        cmds.update(collections.OrderedDict([(p.name, p)]))
+
+    parser = argparse.ArgumentParser(description="research papers repository")
+    subparsers = parser.add_subparsers(title="valid commands", dest="command")
+
+    for cmd_mod in cmds.values():
+        subparser = cmd_mod.parser(subparsers, config) # why do we return the subparser ?
+
+    args = parser.parse_args(raw_args[1:])
+    args.config = config
+
+    args.ui = ui
+    cmd = args.command
+    del args.command
+
+    cmds[cmd].command(**vars(args))
diff --git a/papers/plugs/texnote/texnote.py b/papers/plugs/texnote/texnote.py
index da26383..d550156 100644
--- a/papers/plugs/texnote/texnote.py
+++ b/papers/plugs/texnote/texnote.py
@@ -38,6 +38,10 @@ class TexnotePlugin(PapersPlugin):
     def toto(self):
         print "toto"
 
+    #@RemoveEvent.listen()
+    def testEvent(self, rmevent):
+        print "testEvent"
+
 
 @RemoveEvent.listen()
 def remove(rmevent):
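
Moving the executable's body into papers_cmd.execute(raw_args) makes the command line callable in-process, which is what the new tests rely on. A small usage sketch with illustrative arguments (the repository path below is made up):

    from papers import papers_cmd

    # execute() takes an argv-style list and skips raw_args[0], the program name,
    # so a full command line can be passed after .split().
    papers_cmd.execute('papers init -p /tmp/papers_demo'.split())
    papers_cmd.execute('papers add -b data/pagerank.bib -d data/pagerank.pdf'.split())
    papers_cmd.execute('papers list'.split())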

diff --git a/tests/data/10.1371%2Fjournal.pone.0038236.bib b/tests/data/10.1371%2Fjournal.pone.0038236.bib
new file mode 100644
index 0000000..f8c64d4
--- /dev/null
+++ b/tests/data/10.1371%2Fjournal.pone.0038236.bib
@@ -0,0 +1,19 @@
+
+@article{10.1371_journal.pone.0038236,
+    author = {Caroline Lyon AND Chrystopher L. Nehaniv AND Joe Saunders},
+    journal = {PLoS ONE},
+    publisher = {Public Library of Science},
+    title = {Interactive Language Learning by Robots: The Transition from Babbling to Word Forms},
+    year = {2012},
+    month = {06},
+    volume = {7},
+    url = {http://dx.doi.org/10.1371%2Fjournal.pone.0038236},
+    pages = {e38236},
+    abstract = {
+
+The advent of humanoid robots has enabled a new approach to investigating the acquisition of language, and we report on the development of robots able to acquire rudimentary linguistic skills. Our work focuses on early stages analogous to some characteristics of a human child of about 6 to 14 months, the transition from babbling to first word forms. We investigate one mechanism among many that may contribute to this process, a key factor being the sensitivity of learners to the statistical distribution of linguistic elements. As well as being necessary for learning word meanings, the acquisition of anchor word forms facilitates the segmentation of an acoustic stream through other mechanisms. In our experiments some salient one-syllable word forms are learnt by a humanoid robot in real-time interactions with naive participants. Words emerge from random syllabic babble through a learning process based on a dialogue between the robot and the human participant, whose speech is perceived by the robot as a stream of phonemes. Numerous ways of representing the speech as syllabic segments are possible. Furthermore, the pronunciation of many words in spontaneous speech is variable. However, in line with research elsewhere, we observe that salient content words are more likely than function words to have consistent canonical representations; thus their relative frequency increases, as does their influence on the learner. Variable pronunciation may contribute to early word form acquisition. The importance of contingent interaction in real-time between teacher and learner is reflected by a reinforcement process, with variable success. The examination of individual cases may be more informative than group results. Nevertheless, word forms are usually produced by the robot after a few minutes of dialogue, employing a simple, real-time, frequency dependent mechanism. This work shows the potential of human-robot interaction systems in studies of the dynamics of early language acquisition.
+
+},
+    number = {6},
+    doi = {10.1371/journal.pone.0038236}
+}
+
+
+
diff --git a/tests/data/martius.bib b/tests/data/martius.bib
new file mode 100644
index 0000000..63f62c9
--- /dev/null
+++ b/tests/data/martius.bib
@@ -0,0 +1,19 @@
+
+@article{10.1371_journal.pone.0063400,
+    author = {Georg Martius AND Ralf Der AND Nihat Ay},
+    journal = {PLoS ONE},
+    publisher = {Public Library of Science},
+    title = {Information Driven Self-Organization of Complex Robotic Behaviors},
+    year = {2013},
+    month = {05},
+    volume = {8},
+    url = {http://dx.doi.org/10.1371%2Fjournal.pone.0063400},
+    pages = {e63400},
+    abstract = {
+
+Information theory is a powerful tool to express principles to drive autonomous systems because it is domain invariant and allows for an intuitive interpretation. This paper studies the use of the predictive information (PI), also called excess entropy or effective measure complexity, of the sensorimotor process as a driving force to generate behavior. We study nonlinear and nonstationary systems and introduce the time-local predicting information (TiPI) which allows us to derive exact results together with explicit update rules for the parameters of the controller in the dynamical systems framework. In this way the information principle, formulated at the level of behavior, is translated to the dynamics of the synapses. We underpin our results with a number of case studies with high-dimensional robotic systems. We show the spontaneous cooperativity in a complex physical system with decentralized control. Moreover, a jointly controlled humanoid robot develops a high behavioral variety depending on its physics and the environment it is dynamically embedded into. The behavior can be decomposed into a succession of low-dimensional modes that increasingly explore the behavior space. This is a promising way to avoid the curse of dimensionality which hinders learning systems to scale well.
+
+},
+    number = {5},
+    doi = {10.1371/journal.pone.0063400}
+}
+
+
+
diff --git a/tests/data/turing-mind-1950.pdf b/tests/data/turing-mind-1950.pdf
new file mode 100644
index 0000000..4f1ba38
Binary files /dev/null and b/tests/data/turing-mind-1950.pdf differ
diff --git a/tests/data/turing1950.bib b/tests/data/turing1950.bib
new file mode 100644
index 0000000..2f53fa5
--- /dev/null
+++ b/tests/data/turing1950.bib
@@ -0,0 +1,10 @@
+@article{turing1950computing,
+  title={Computing machinery and intelligence},
+  author={Turing, Alan M},
+  journal={Mind},
+  volume={59},
+  number={236},
+  pages={433--460},
+  year={1950},
+  publisher={JSTOR}
+}
diff --git a/tests/test.sh b/tests/test.sh
index 1fdbf8c..7bfba55 100755
--- a/tests/test.sh
+++ b/tests/test.sh
@@ -1,10 +1,10 @@
 #!/usr/bin/env bash
-rm -Rf paper_test/;
+rm -Rf paper_test/*;
 papers init -p paper_test/;
 papers add -d data/pagerank.pdf -b data/pagerank.bib;
 papers list;
 papers tag;
-papers tag Page99 network,search;
+papers tag Page99 network+search;
 papers tag Page99;
 papers tag search;
 papers tag 0;
diff --git a/tests/test_tag.py b/tests/test_tag.py
index 9b12b65..299bdf9 100644
--- a/tests/test_tag.py
+++ b/tests/test_tag.py
@@ -4,7 +4,7 @@ import unittest
 
 import testenv
 from papers.commands.tag_cmd import _parse_tags, _tag_groups
 
-class TestCreateCitekey(unittest.TestCase):
+class TestTag(unittest.TestCase):
 
     def test_tag_parsing(self):
diff --git a/tests/test_usecase.py b/tests/test_usecase.py
new file mode 100644
index 0000000..0c091df
--- /dev/null
+++ b/tests/test_usecase.py
@@ -0,0 +1,172 @@
+import sys, os
+import unittest
+import pkgutil
+
+import testenv
+import fake_filesystem
+import fake_filesystem_shutil
+
+from papers import papers_cmd
+from papers import color
+from papers.p3 import io
+
+real_os = os
+real_open = open
+
+fake_os, fake_open, fake_shutil = None, None, None
+
+def _create_fake_fs():
+    global fake_os, fake_open, fake_shutil
+
+    fake_fs = fake_filesystem.FakeFilesystem()
+    fake_os = fake_filesystem.FakeOsModule(fake_fs)
+    fake_open = fake_filesystem.FakeFileOpen(fake_fs)
+    fake_shutil = fake_filesystem_shutil.FakeShutilModule(fake_fs)
+
+    fake_fs.CreateDirectory(fake_os.path.expanduser('~'))
+    __builtins__['open'] = fake_open
+    __builtins__['file'] = fake_open
+
+    sys.modules['os'] = fake_os
+    sys.modules['shutil'] = fake_shutil
+
+    import papers
+    for importer, modname, ispkg in pkgutil.walk_packages(
+            path=papers.__path__,
+            prefix=papers.__name__+'.',
+            onerror=lambda x: None):
+        md = __import__(modname, fromlist = 'dummy')
+        md.os = fake_os
+        md.shutil = fake_shutil
+
+    return fake_fs
+
+def _copy_data(fs):
+    """Copy all the data directory into the fake fs"""
+    for filename in real_os.listdir('data/'):
+        filepath = 'data/' + filename
+        if real_os.path.isfile(filepath):
+            with real_open(filepath, 'r') as f:
+                fs.CreateFile(filepath, contents = f.read())
+        if real_os.path.isdir(filepath):
+            fs.CreateDirectory(filepath)
+
+def redirect(f):
+    def newf(*args, **kwargs):
+        old_stderr, old_stdout = sys.stderr, sys.stdout
+        stdout = io.StringIO()
+        stderr = io.StringIO()
+        sys.stdout, sys.stderr = stdout, stderr
+        try:
+            return f(*args, **kwargs), stdout, stderr
+        finally:
+            sys.stderr, sys.stdout = old_stderr, old_stdout
+    return newf
+
+#@redirect
+def _execute_cmds(cmds, fs = None):
+    if fs is None:
+        fs = _create_fake_fs()
+        _copy_data(fs)
+
+    outs = []
+    for cmd in cmds:
+        _, stdout, stderr = redirect(papers_cmd.execute)(cmd.split())
+        outs.append(color.undye(stdout.getvalue()))
+
+    return outs
+
+
+class TestInit(unittest.TestCase):
+
+    def test_init(self):
+        fs = _create_fake_fs()
+        papers_cmd.execute('papers init -p paper_test2'.split())
+        self.assertEqual(set(fake_os.listdir('/paper_test2/')), {'bibdata', 'doc', 'meta', 'papers.yaml'})
+
+
+class TestAdd(unittest.TestCase):
+
+    def test_add(self):
+
+        fs = _create_fake_fs()
+        _copy_data(fs)
+
+        papers_cmd.execute('papers init'.split())
+        papers_cmd.execute('papers add -b /data/pagerank.bib -d /data/pagerank.pdf'.split())
+
+    def test_add2(self):
+
+        fs = _create_fake_fs()
+        _copy_data(fs)
+
+        papers_cmd.execute('papers init -p /not_default'.split())
+        papers_cmd.execute('papers add -b /data/pagerank.bib -d /data/pagerank.pdf'.split())
+        self.assertEqual(set(fake_os.listdir('/not_default/doc')), {'Page99.pdf'})
+
+
+class TestList(unittest.TestCase):
+
+    def test_list(self):
+
+        fs = _create_fake_fs()
+        _copy_data(fs)
+
+        papers_cmd.execute('papers init -p /not_default2'.split())
+        papers_cmd.execute('papers list'.split())
+        papers_cmd.execute('papers add -b /data/pagerank.bib -d /data/pagerank.pdf'.split())
+        papers_cmd.execute('papers list'.split())
+
+
+class TestUsecase(unittest.TestCase):
+
+    def test_first(self):
+
+        correct = ['Initializing papers in /paper_first/.\n',
+                   'Added: Page99\n',
+                   '0: [Page99] L. Page et al. "The PageRank Citation Ranking Bringing Order to the Web" (1999) \n',
+                   '',
+                   '',
+                   'search network\n',
+                   '0: [Page99] L. Page et al. "The PageRank Citation Ranking Bringing Order to the Web" (1999) search network\n',
+                   'search network\n']
+
+        cmds = ['papers init -p paper_first/',
+                'papers add -d data/pagerank.pdf -b data/pagerank.bib',
+                'papers list',
+                'papers tag',
+                'papers tag Page99 network+search',
+                'papers tag Page99',
+                'papers tag search',
+                'papers tag 0',
+               ]
+
+        self.assertEqual(correct, _execute_cmds(cmds))
+
+    def test_second(self):
+
+        cmds = ['papers init -p paper_second/',
+                'papers add -b data/pagerank.bib',
+                'papers add -d data/turing-mind-1950.pdf -b data/turing1950.bib',
+                'papers add -b data/martius.bib',
+                'papers add -b data/10.1371%2Fjournal.pone.0038236.bib',
+                'papers list',
+                'papers attach Page99 data/pagerank.pdf'
+               ]
+
+        _execute_cmds(cmds)
+
+    def test_third(self):
+
+        cmds = ['papers init',
+                'papers add -b data/pagerank.bib',
+                'papers add -d data/turing-mind-1950.pdf -b data/turing1950.bib',
+                'papers add -b data/martius.bib',
+                'papers add -b data/10.1371%2Fjournal.pone.0038236.bib',
+                'papers list',
+                'papers attach Page99 data/pagerank.pdf',
+                'papers remove -f Page99',
+                'papers remove -f turing1950computing',
+               ]
+
+        _execute_cmds(cmds)
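
For reference, a sketch of how the helpers in test_usecase.py combine (the command strings and the use of the result are illustrative): _execute_cmds() builds a fake filesystem, copies the data/ fixtures into it, runs each command through papers_cmd.execute() with stdout captured by redirect(), and returns each command's output with ANSI codes stripped by color.undye().

    # Illustrative use of the helpers defined above.
    outs = _execute_cmds(['papers init -p paper_demo/',
                          'papers add -d data/pagerank.pdf -b data/pagerank.bib',
                          'papers list'])
    # outs[-1] is the plain-text output of 'papers list', colors already removed.
    print(outs[-1])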