diff --git a/jedi/inference/base_value.py b/jedi/inference/base_value.py
index d0883307..c21a77b8 100644
--- a/jedi/inference/base_value.py
+++ b/jedi/inference/base_value.py
@@ -27,7 +27,9 @@ class HelperValueMixin:
"""
:param position: Position of the last statement -> tuple of line, column
"""
- pass
+        if name_context is None:
+            name_context = self
+        names = self.goto(name_or_str, name_context, analysis_errors)
+        return ValueSet.from_sets(name.infer() for name in names)
class Value(HelperValueMixin):
"""
@@ -46,13 +48,13 @@ class Value(HelperValueMixin):
Since Wrapper is a super class for classes, functions and modules,
the return value will always be true.
"""
- pass
+ return True
def py__getattribute__alternatives(self, name_or_str):
"""
For now a way to add values in cases like __getattr__.
"""
- pass
+ return NO_VALUES
def infer_type_vars(self, value_set):
"""
@@ -81,14 +83,35 @@ class Value(HelperValueMixin):
above example this would first be the representation of the list
`[1]` and then, when recursing, just of `1`.
"""
- pass
+ return {}
+
+def iterator_to_value_set(func):
+    """
+    Decorator that turns a function returning an iterator of values into one
+    returning a ``ValueSet``.
+    """
+    def wrapper(*args, **kwargs):
+        return ValueSet(func(*args, **kwargs))
+    return wrapper
+
def iterate_values(values, contextualized_node=None, is_async=False):
"""
Calls `iterate`, on all values but ignores the ordering and just returns
all values that the iterate functions yield.
"""
- pass
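+    # For example, iterating values that represent `[1, 'a']` yields lazy
+    # values for `1` and `'a'`; their inferred values are merged below.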
+    return ValueSet.from_sets(
+        lazy_value.infer()
+        for lazy_value in values.iterate(contextualized_node, is_async=is_async)
+    )
class _ValueWrapperBase(HelperValueMixin):
@@ -139,7 +162,7 @@ class ValueSet:
"""
Used to work with an iterable of set.
"""
- pass
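+        # e.g. from_sets([ValueSet([a]), {b, c}]) -> ValueSet({a, b, c})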
+        return cls(frozenset(value for value_set in sets for value in value_set))
def __or__(self, other):
return self._from_frozen_set(self._set | other._set)
diff --git a/jedi/inference/cache.py b/jedi/inference/cache.py
index 5d3266df..f1daabde 100644
--- a/jedi/inference/cache.py
+++ b/jedi/inference/cache.py
@@ -8,6 +8,85 @@ from jedi import debug
_NO_DEFAULT = object()
_RECURSION_SENTINEL = object()
+def inference_state_function_cache(default=_NO_DEFAULT):
+    """
+    This is a special memoizer that caches a function call whose first
+    argument is the inference_state.
+    """
+    def decorator(func):
+        return _memoize_default(default=default, inference_state_is_first_arg=True)(func)
+    return decorator
+
+def inference_state_method_cache(default=_NO_DEFAULT):
+    """
+    This is a special memoizer that caches a method call, taking the
+    inference_state from ``self.inference_state``.
+    """
+    def decorator(func):
+        return _memoize_default(default=default)(func)
+    return decorator
+
+def inference_state_as_method_param_cache():
+    """
+    This is a special memoizer that caches a method call where the
+    inference_state is passed as the second parameter (as in the metaclass
+    ``__call__`` below).
+    """
+    def decorator(call):
+        return _memoize_default(second_arg_is_inference_state=True)(call)
+    return decorator
+
def _memoize_default(default=_NO_DEFAULT, inference_state_is_first_arg=False, second_arg_is_inference_state=False):
""" This is a typical memoization decorator, BUT there is one difference:
To prevent recursion it sets defaults.
@@ -16,7 +95,34 @@ def _memoize_default(default=_NO_DEFAULT, inference_state_is_first_arg=False, se
don't think, that there is a big speed difference, but there are many cases
where recursion could happen (think about a = b; b = a).
"""
- pass
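+    # Example of the recursion this guards against: inferring `a = b` needs
+    # `b`, which is `b = a`, which needs `a` again. With a default given, the
+    # second lookup of `a` hits the pre-seeded cache entry and stops there.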
+    def func_decorator(func):
+        def wrapper(obj, *args, **kwargs):
+            # Find the memoize_cache of the relevant inference_state.
+            if inference_state_is_first_arg:
+                cache = obj.memoize_cache
+            elif second_arg_is_inference_state:
+                cache = args[0].memoize_cache
+            else:
+                cache = obj.inference_state.memoize_cache
+
+            key = (func, obj, args, frozenset(kwargs.items()))
+            try:
+                hash(key)
+            except TypeError:
+                debug.warning('Cache key is not hashable: %s %s %s', func, args, kwargs)
+                return func(obj, *args, **kwargs)
+
+            if key in cache:
+                return cache[key]
+            if default is not _NO_DEFAULT:
+                # Seed the cache so that recursive calls immediately get the
+                # default instead of recursing forever.
+                cache[key] = default
+            result = cache[key] = func(obj, *args, **kwargs)
+            return result
+
+        return wrapper
+
+    return func_decorator
class CachedMetaClass(type):
"""
@@ -34,4 +140,27 @@ def inference_state_method_generator_cache():
This is a special memoizer. It memoizes generators and also checks for
recursion errors and returns no further iterator elemends in that case.
"""
- pass
\ No newline at end of file
+    def decorator(func):
+        def wrapper(obj, *args, **kwargs):
+            cache = obj.inference_state.memoize_cache
+            key = (func, obj, args, frozenset(kwargs.items()))
+
+            if key in cache:
+                cached = cache[key]
+                if cached is _RECURSION_SENTINEL:
+                    # We are already computing this generator further up the
+                    # stack; yield no further elements.
+                    return iter([])
+                return iter(cached)
+
+            # Mark the key before executing so that recursive calls see the
+            # sentinel instead of recursing forever.
+            cache[key] = _RECURSION_SENTINEL
+            values = list(func(obj, *args, **kwargs))
+            cache[key] = values
+            return iter(values)
+
+        return wrapper
+    return decorator
\ No newline at end of file
diff --git a/jedi/inference/compiled/value.py b/jedi/inference/compiled/value.py
index d48f4959..87d4ddde 100644
--- a/jedi/inference/compiled/value.py
+++ b/jedi/inference/compiled/value.py
@@ -107,12 +107,37 @@ class CompiledValueFilter(AbstractFilter):
"""
To remove quite a few access calls we introduced the callback here.
"""
- pass
+ has_attribute, is_descriptor = allowed_getattr_callback(name)
+ if not has_attribute and in_dir_callback(name):
+ return iter([])
+
+ if check_has_attribute and not has_attribute:
+ return iter([])
+
+ return iter([CompiledName(
+ self._inference_state,
+ self.compiled_value,
+ name,
+ is_descriptor
+ )])
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.compiled_value)
docstr_defaults = {'floating point number': 'float', 'character': 'str', 'integer': 'int', 'dictionary': 'dict', 'string': 'str'}
+@inference_state_function_cache()
+def create_from_access_path(inference_state, access_path):
+ """
+ Creates a compiled value from an access path.
+ """
+    value = None
+    for _, access in access_path.accesses:
+        # Every step of the path carries its own access handle, so the value
+        # is built directly from it instead of re-resolving the name.
+        value = CompiledValue(inference_state, access)
+    return value
+
def _parse_function_doc(doc):
"""
Takes a function and returns the params and return value as a tuple.
@@ -121,8 +146,53 @@ def _parse_function_doc(doc):
TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
TODO docstrings like 'tuple of integers'
"""
- pass
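+    # Illustrative example (assuming the common "name(params) -> type" form):
+    #   "pow(x, y[, z]) -> number"  ->  (['x', 'y', 'z'], 'number')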
+ if doc is None:
+ return None, None
+
+ doc = doc.strip()
+ if not doc:
+ return None, None
+
+ # Get rid of multiple spaces
+ doc = ' '.join(doc.split())
+
+ # Parse return value
+    # Parse return value
+    return_string = None
+    arrow_index = doc.find('->')
+    if arrow_index != -1:
+        return_string = doc[arrow_index + 2:].strip()
+        if return_string:
+            # Get rid of a "Returns" prefix
+            return_string = re.sub(r'^returns\s+', '', return_string.lower())
+            # Get rid of punctuation
+            return_string = re.sub(r'[^\w\s]', '', return_string)
+            # Handle "floating point number" and the like
+            for type_string, actual in docstr_defaults.items():
+                return_string = return_string.replace(type_string, actual)
+            # Handle "sequence of" and the like
+            return_string = re.sub(r'^sequence of\s+', '', return_string)
+            return_string = re.sub(r'\s*sequence\s*$', '', return_string)
+
+    # Parse parameters: take the text inside the first parenthesis pair.
+    param_string = doc[:arrow_index] if arrow_index != -1 else doc
+    start = param_string.find('(')
+    end = param_string.rfind(')')
+    if start == -1 or end == -1 or end < start:
+        return None, return_string
+    param_string = param_string[start + 1:end]
+
+    # Split parameters and drop empty entries.
+    params = [p.strip() for p in param_string.split(',')]
+    params = [p for p in params if p]
+    # Remove optional-parameter brackets, e.g. "y[, z]".
+    params = [re.sub(r'[\[\]]', '', p).strip() for p in params]
+
+    return params, return_string
def _normalize_create_args(func):
"""The cache doesn't care about keyword vs. normal args."""
- pass
\ No newline at end of file
+    def wrapper(inference_state, obj, parent_context=None):
+        # Assumes the decorated factory takes (inference_state, obj,
+        # parent_context); passing everything positionally makes keyword and
+        # positional calls share one cache key.
+        return func(inference_state, obj, parent_context)
+    return wrapper
\ No newline at end of file
diff --git a/jedi/inference/helpers.py b/jedi/inference/helpers.py
index 7a8c2586..f58ff39c 100644
--- a/jedi/inference/helpers.py
+++ b/jedi/inference/helpers.py
@@ -6,11 +6,92 @@ from itertools import chain
from contextlib import contextmanager
from parso.python import tree
+def is_big_annoying_library(value):
+ """
+ Checks if a value is part of a library that is known to cause issues.
+ """
+    path = value.get_root_context().py__file__()
+    if path is None:
+        return False
+
+    # These libraries cause problems because they use modules in really
+    # strange ways and they are huge (numpy & friends), which makes inference
+    # really slow.
+    parts = str(path).split(os.path.sep)
+    return any(x in parts for x in ('numpy', 'scipy', 'tensorflow', 'matplotlib', 'pandas'))
+
+def reraise_getitem_errors(func):
+ """
+ Re-throw any SimpleGetItemNotFound errors as KeyError or IndexError.
+ """
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except SimpleGetItemNotFound as e:
+            if e.args and isinstance(e.args[0], str):
+                raise KeyError(e.args[0])
+            raise IndexError(*e.args)
+ return wrapper
+
def deep_ast_copy(obj):
"""
Much, much faster than copy.deepcopy, but just for parser tree nodes.
"""
- pass
+    # The top-level copy keeps its original parent so the copied node can
+    # still be positioned within the tree.
+    obj_copy = copy.copy(obj)
+
+    if isinstance(obj, tree.BaseNode):
+        new_children = []
+        for child in obj.children:
+            if isinstance(child, tree.Leaf):
+                new_child = copy.copy(child)
+            else:
+                new_child = deep_ast_copy(child)
+            new_child.parent = obj_copy
+            new_children.append(new_child)
+        obj_copy.children = new_children
+
+    return obj_copy
+
+def is_string(value):
+    """
+    Checks if a value represents a compiled string.
+    """
+    return value.is_compiled() and isinstance(value.get_safe_value(default=None), str)
+
+def get_str_or_none(value):
+    """
+    Returns the str the value wraps, or None if it does not wrap a str.
+    """
+    safe = value.get_safe_value(default=None)
+    return safe if isinstance(safe, str) else None
+
+def get_int_or_none(value):
+    """
+    Returns the int the value wraps, or None if it does not wrap an int.
+    """
+    safe = value.get_safe_value(default=None)
+    return safe if isinstance(safe, int) else None
+
+def values_from_qualified_names(inference_state, *names):
+    """
+    Resolves values for a dotted path, e.g. ``('collections', 'defaultdict')``
+    returns the value set for ``collections.defaultdict``.
+    """
+    # A single name falls back to the builtins module.
+    value_set = inference_state.import_module(names[:-1] or ('builtins',))
+    return value_set.py__getattribute__(names[-1])
def infer_call_of_leaf(context, leaf, cut_own_trailer=False):
"""
@@ -31,7 +112,16 @@ def infer_call_of_leaf(context, leaf, cut_own_trailer=False):
- infer the type of ``bar`` to be able to jump to the definition of foo
The option ``cut_own_trailer`` must be set to true for the second purpose.
"""
- pass
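+    # Example: for `bar.foo` with the `.` leaf, cut_own_trailer=False infers
+    # `bar.foo`, while cut_own_trailer=True infers only `bar`.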
+    trailer = leaf.parent
+    # If the leaf is not the start of a trailer (i.e. not a `.`, `(` or `[`),
+    # just infer the leaf (or its enclosing atom) directly.
+    if trailer.type != 'trailer' or trailer.children[0] is not leaf:
+        if trailer.type == 'atom':
+            return context.infer_node(trailer)
+        return context.infer_node(leaf)
+
+    power = trailer.parent
+    index = power.children.index(trailer)
+    cut = index if cut_own_trailer else index + 1
+
+    base = power.children[0]
+    values = context.infer_node(base)
+    from jedi.inference.syntax_tree import infer_trailer
+    for t in power.children[1:cut]:
+        values = infer_trailer(context, values, t)
+    return values
class SimpleGetItemNotFound(Exception):
pass
\ No newline at end of file
diff --git a/jedi/inference/lazy_value.py b/jedi/inference/lazy_value.py
index 4cc4bfbe..c96792a0 100644
--- a/jedi/inference/lazy_value.py
+++ b/jedi/inference/lazy_value.py
@@ -29,5 +29,13 @@ class LazyTreeValue(AbstractLazyValue):
self.context = context
self._predefined_names = dict(context.predefined_names)
+def get_merged_lazy_value(lazy_values):
+ """
+    Returns a single lazy value whose inference merges the results of all
+    given lazy values; a single input is returned unchanged.
+ """
+ if len(lazy_values) == 1:
+ return lazy_values[0]
+ return MergedLazyValues(lazy_values)
+
class MergedLazyValues(AbstractLazyValue):
"""data is a list of lazy values."""
\ No newline at end of file
diff --git a/jedi/inference/utils.py b/jedi/inference/utils.py
index 5d00a322..ef6857d8 100644
--- a/jedi/inference/utils.py
+++ b/jedi/inference/utils.py
@@ -10,7 +10,7 @@ del _sep
def unite(iterable):
"""Turns a two dimensional array into a one dimensional."""
- pass
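+    # e.g. unite([[1, 2], [3]]) -> {1, 2, 3}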
+ return set(x for objects in iterable for x in objects)
class UncaughtAttributeError(Exception):
"""
@@ -40,7 +40,33 @@ def reraise_uncaught(func):
`AttributeError` to `UncaughtAttributeError` to avoid unexpected catch.
This helps us noticing bugs earlier and facilitates debugging.
"""
- pass
+ @functools.wraps(func)
+ def wrapper(*args, **kwds):
+ try:
+ return func(*args, **kwds)
+ except AttributeError as e:
+ raise UncaughtAttributeError(e) from e
+ return wrapper
+
+def safe_property(func):
+ """
+ Property decorator that wraps the getter in a reraise_uncaught decorator.
+ """
+ return property(reraise_uncaught(func))
+
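+# Illustrative usage: a generator function decorated with @to_list returns a
+# materialized list, e.g. a function yielding 'a' then 'b' returns ['a', 'b'].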
+def to_list(func):
+    """
+    Decorator that turns the (usually generator) return value of ``func``
+    into a list.
+    """
+    def wrapper(*args, **kwargs):
+        return list(func(*args, **kwargs))
+    return wrapper
class PushBackIterator:
diff --git a/jedi/parser_utils.py b/jedi/parser_utils.py
index 67a7aea8..ef296bef 100644
--- a/jedi/parser_utils.py
+++ b/jedi/parser_utils.py
@@ -6,14 +6,115 @@ from weakref import WeakKeyDictionary
from parso.python import tree
from parso.cache import parser_cache
from parso import split_lines
+
_EXECUTE_NODES = {'funcdef', 'classdef', 'import_from', 'import_name', 'test', 'or_test', 'and_test', 'not_test', 'comparison', 'expr', 'xor_expr', 'and_expr', 'shift_expr', 'arith_expr', 'atom_expr', 'term', 'factor', 'power', 'atom'}
_FLOW_KEYWORDS = ('try', 'except', 'finally', 'else', 'if', 'elif', 'with', 'for', 'while')
+def find_statement_documentation(node):
+    """
+    Finds the docstring that directly follows a statement (e.g. an assignment
+    documented by a string literal right below it).
+    """
+    if node.type == 'expr_stmt':
+        node = node.parent  # simple_stmt
+        maybe_string = node.get_next_sibling()
+        if maybe_string is not None and maybe_string.type == 'simple_stmt':
+            maybe_string = maybe_string.children[0]
+            if maybe_string.type == 'string':
+                return cleandoc(literal_eval(maybe_string.value))
+    return ''
+
+def is_scope(node):
+    """
+    Returns True if the node is a scope (module, class, function, lambda or
+    comprehension).
+    """
+    t = node.type
+    if t == 'comp_for':
+        # Since Python 3.8 the async keyword sits outside of sync_comp_for.
+        return node.children[1].type != 'sync_comp_for'
+    return t in ('file_input', 'classdef', 'funcdef', 'lambdef', 'sync_comp_for')
+
+def get_flow_branch_keyword(flow_node, node):
+    """
+    Returns the keyword leaf (``if``, ``elif``, ``else``, ``try``, ``except``,
+    ...) of the branch of ``flow_node`` that contains ``node``.
+    """
+    start_pos = node.start_pos
+    if not (flow_node.start_pos < start_pos <= flow_node.end_pos):
+        raise ValueError('The node is not part of the flow.')
+
+    keyword = None
+    for child in flow_node.children:
+        if start_pos < child.start_pos:
+            return keyword
+        first_leaf = child.get_first_leaf()
+        if first_leaf in _FLOW_KEYWORDS:
+            keyword = first_leaf
+    return None
+
+def _get_parent_scope_cache(func):
+ """
+ This is a cache to avoid multiple lookups of parent scopes.
+ """
+    cache = WeakKeyDictionary()
+
+    def wrapper(node, *args, **kwargs):
+        try:
+            node_cache = cache[node]
+        except KeyError:
+            node_cache = cache[node] = {}
+
+        # The extra arguments (e.g. include_flows) have to be part of the key,
+        # otherwise different lookups would share one cached result.
+        key = (args, frozenset(kwargs.items()))
+        try:
+            return node_cache[key]
+        except KeyError:
+            result = node_cache[key] = func(node, *args, **kwargs)
+            return result
+
+    return wrapper
+
+def _function_is_x_method(name, other_name=None):
+ def wrapper(func):
+ decorators = func.get_decorators()
+ if not decorators:
+ return False
+
+ for decorator in decorators:
+ dotted_name = decorator.children[1]
+ if not isinstance(dotted_name, tree.Name):
+ continue
+
+ value = dotted_name.value
+ if value == name or other_name is not None and value == other_name:
+ return True
+ return False
+ return wrapper
+
+function_is_staticmethod = _function_is_x_method('staticmethod')
+function_is_classmethod = _function_is_x_method('classmethod')
+function_is_property = _function_is_x_method('property', 'cached_property')
+
def get_executable_nodes(node, last_added=False):
"""
- For static analysis.
+    For static analysis. Returns a list of nodes that should be inferred,
+    roughly in source order.
"""
- pass
+    result = []
+    typ = node.type
+    if typ == 'name':
+        next_leaf = node.get_next_leaf()
+        if last_added is False and node.parent.type != 'param' and next_leaf != '=':
+            result.append(node)
+    elif typ == 'expr_stmt':
+        # Inferring the statement itself is enough; its names are implied.
+        result.append(node)
+        for child in node.children:
+            result += get_executable_nodes(child, last_added=True)
+    elif typ == 'decorator':
+        # A decorator with arguments: infer the call.
+        if node.children[-2] == ')':
+            node = node.children[-3]
+            if node != '(':
+                result += get_executable_nodes(node)
+    else:
+        try:
+            children = node.children
+        except AttributeError:
+            pass
+        else:
+            if node.type in _EXECUTE_NODES and not last_added:
+                result.append(node)
+            for child in children:
+                result += get_executable_nodes(child, last_added)
+    return result
def for_stmt_defines_one_name(for_stmt):
"""
@@ -22,11 +123,20 @@ def for_stmt_defines_one_name(for_stmt):
:returns: bool
"""
- pass
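+    # e.g. `for x in y` -> True, `for x, z in y` -> False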
+    return for_stmt.children[1].type == 'name'
def clean_scope_docstring(scope_node):
""" Returns a cleaned version of the docstring token. """
- pass
+ node = scope_node.get_doc_node()
+ if node is None:
+ return ''
+
+ if node.type == 'string':
+ cleaned = cleandoc(literal_eval(node.value))
+ else:
+ cleaned = cleandoc(node.value)
+ return cleaned
def get_signature(funcdef, width=72, call_string=None, omit_first_param=False, omit_return_annotation=False):
"""
@@ -39,26 +149,79 @@ def get_signature(funcdef, width=72, call_string=None, omit_first_param=False, o
:rtype: str
"""
- pass
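+    # e.g. for `def foo(bar, baz=3) -> int: ...` this returns
+    # "foo(bar, baz=3) -> int".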
+    if call_string is None:
+        # Lambdas have no name node.
+        call_string = '<lambda>' if funcdef.type == 'lambdef' else funcdef.name.value
+
+    params = funcdef.get_params()
+    if omit_first_param and params:
+        params = params[1:]
+
+    # Normalize whitespace and drop the trailing comma each param carries.
+    param_strs = [re.sub(r'\s+', ' ', p.get_code(include_prefix=False)).strip(' ,')
+                  for p in params]
+    param_str = ', '.join(param_strs)
+
+    return_annotation = ''
+    if not omit_return_annotation and funcdef.annotation is not None:
+        return_annotation = ' -> ' + funcdef.annotation.get_code(include_prefix=False)
+
+    code = call_string + '(' + param_str + ')' + return_annotation
+    if len(code) > width:
+        # Too long for one line: put every parameter on its own line.
+        code = (call_string + '(\n    ' + ',\n    '.join(param_strs) + '\n)'
+                + return_annotation)
+    return code
def move(node, line_offset):
"""
Move the `Node` start_pos.
"""
- pass
+    try:
+        children = node.children
+    except AttributeError:
+        # A leaf: adjust its line; columns are unaffected by a line offset.
+        node.line += line_offset
+    else:
+        for child in children:
+            move(child, line_offset)
def get_following_comment_same_line(node):
"""
returns (as string) any comment that appears on the same line,
after the node, including the #
"""
- pass
+    try:
+        if node.type == 'for_stmt':
+            whitespace = node.children[5].get_first_leaf().prefix
+        elif node.type == 'with_stmt':
+            whitespace = node.children[node.children.index(':') + 1].get_first_leaf().prefix
+        else:
+            whitespace = node.get_last_leaf().get_next_leaf().prefix
+    except (AttributeError, ValueError):
+        return None
+
+    if '#' not in whitespace:
+        return None
+    comment = whitespace[whitespace.index('#'):]
+    # Only the part on the same line counts.
+    return split_lines(comment)[0]
def get_parent_scope(node, include_flows=False):
"""
Returns the underlying scope.
"""
- pass
+    scope = node.parent
+    while scope is not None:
+        if include_flows and isinstance(scope, tree.Flow):
+            return scope
+        if is_scope(scope):
+            return scope
+        scope = scope.parent
+    return None
get_cached_parent_scope = _get_parent_scope_cache(get_parent_scope)
def get_cached_code_lines(grammar, path):
@@ -66,7 +229,8 @@ def get_cached_code_lines(grammar, path):
Basically access the cached code lines in parso. This is not the nicest way
to do this, but we avoid splitting all the lines again.
"""
- pass
+    return get_parso_cache_node(grammar, path).lines
def get_parso_cache_node(grammar, path):
"""
@@ -76,19 +240,33 @@ def get_parso_cache_node(grammar, path):
The reason for this is mostly caching. This is obviously also a sign of a
broken caching architecture.
"""
- pass
+ return parser_cache[grammar._hashed][path]
def cut_value_at_position(leaf, position):
"""
Cuts of the value of the leaf at position
"""
- pass
+    lines = split_lines(leaf.value, keepends=True)[:position[0] - leaf.line + 1]
+    column = position[1]
+    if leaf.line == position[0]:
+        # Only on the leaf's first line does the leaf's own column matter.
+        column -= leaf.column
+    if not lines:
+        return ''
+    lines[-1] = lines[-1][:column]
+    return ''.join(lines)
def expr_is_dotted(node):
"""
Checks if a path looks like `name` or `name.foo.bar` and not `name()`.
"""
- pass
-function_is_staticmethod = _function_is_x_method('staticmethod')
-function_is_classmethod = _function_is_x_method('classmethod')
-function_is_property = _function_is_x_method('property', 'cached_property')
\ No newline at end of file
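+    # e.g. `name` and `name.foo.bar` -> True; `name()`, `name[0]` and
+    # `await name.foo` -> False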
+    if node.type == 'atom':
+        if len(node.children) == 3 and node.children[0] == '(':
+            # A parenthesized expression: check what is inside.
+            return expr_is_dotted(node.children[1])
+        return False
+    if node.type == 'atom_expr':
+        children = node.children
+        if children[0] == 'await':
+            return False
+        if not expr_is_dotted(children[0]):
+            return False
+        # Every trailer must be an attribute access, not a call or index.
+        return all(c.children[0] == '.' for c in children[1:])
+    return node.type == 'name'
\ No newline at end of file