From 888ad29be0bfaaa9a361ec4cae3755b59b71dadb Mon Sep 17 00:00:00 2001
From: Timotej Lazar
Date: Thu, 15 Jan 2015 13:32:33 +0100
Subject: Use relative imports in monkey modules

---
 monkey/edits.py       |  8 ++++----
 monkey/monkey.py      | 14 +++++++-------
 monkey/prolog/util.py |  2 +-
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/monkey/edits.py b/monkey/edits.py
index fef591a..349ebf7 100644
--- a/monkey/edits.py
+++ b/monkey/edits.py
@@ -2,10 +2,10 @@
 
 import collections
 
-from action import expand, parse
-from graph import Node
-from prolog.util import rename_vars, stringify, tokenize
-from util import get_line
+from .action import expand, parse
+from .graph import Node
+from .prolog.util import rename_vars, stringify, tokenize
+from .util import get_line
 
 # A line edit is a contiguous sequences of actions within a single line. This
 # function takes a sequence of actions and builds a directed acyclic graph
diff --git a/monkey/monkey.py b/monkey/monkey.py
index 42c81f4..8e805f5 100755
--- a/monkey/monkey.py
+++ b/monkey/monkey.py
@@ -8,13 +8,13 @@ import time
 
 from termcolor import colored
 
-import db
-from action import parse
-from edits import classify_edits, clean_graph, edit_graph, get_edits_from_traces
-from graph import Node, graphviz
-from prolog.engine import PrologEngine
-from prolog.util import compose, decompose, map_vars, rename_vars, stringify
-from util import PQueue, Token, indent
+from . import db
+from .action import parse
+from .edits import classify_edits, clean_graph, edit_graph, get_edits_from_traces
+from .graph import Node, graphviz
+from .prolog.engine import PrologEngine
+from .prolog.util import compose, decompose, map_vars, rename_vars, stringify
+from .util import PQueue, Token, indent
 
 # score a program (a list of lines) according to lines distribution
 def score(program, lines):
diff --git a/monkey/prolog/util.py b/monkey/prolog/util.py
index 0ab3c8b..05505d0 100644
--- a/monkey/prolog/util.py
+++ b/monkey/prolog/util.py
@@ -5,7 +5,7 @@ import math
 import re
 
 from .lexer import lexer
-from util import Token
+from ..util import Token
 
 def tokenize(text):
 	lexer.input(text)
-- 
cgit v1.2.1