Alien-SVN

 view release on MetaCPAN or search on MetaCPAN

src/subversion/build/transform_sql.py  view on Meta::CPAN

        self.output.write('  "%s " \\\n' % line.rstrip())

    # previous line had a continuation. end the madness.
    self.close_define()

  def close_define(self):
    """Terminate the currently open '#define', if any, and reset the flag.

    Safe to call unconditionally: does nothing when no define was started.
    """
    if not self.var_printed:
      return
    self.output.write(DEFINE_END)
    self.var_printed = False


class NonRewritableDict(dict):
  """A dictionary that does not allow self[k]=v when k in self
  (unless v is equal to the stored value).

  (An entry would have to be explicitly deleted before a new value
  may be entered.)
  """

  def __setitem__(self, key, val):
    # Rebinding is rejected only when the key exists AND the new value
    # differs; re-inserting the identical value is a harmless no-op.
    if key in self and self[key] != val:
      raise Exception("Can't re-insert key %r with value %r "
                      "(already present with value %r)"
                      % (key, val, self[key]))
    super(NonRewritableDict, self).__setitem__(key, val)

def hotspots(fd):
  """Yield the lines of fd that fall inside a definition of a static
  const svn_token_map_t[] array.

  The line containing 'svn_token_map_t' opens a hotspot (and is yielded);
  the closing brace line ends it (and is not yielded).
  """
  inside = False
  for line in fd:
    # While outside, look for the array declaration; while inside, look
    # for the terminating '};' (spelled \x7d; to survive text munging).
    marker = '\x7d;' if inside else 'svn_token_map_t'
    if marker in line:
      inside = not inside
    if inside:
      yield line

def extract_token_map(filename):
  """Parse FILENAME (a generated token-map.h) and return a
  NonRewritableDict mapping each MAP_* enumerator name to its quoted
  token string.

  Returns an empty dict when the file cannot be opened (a missing token
  map simply means no substitutions are available).
  """
  try:
    fd = open(filename)
  except IOError:
    return {}

  # Each hotspot line looks like: { "token", MAP_SOMETHING }, — capture
  # the quoted token and the MAP_* name, keyed by the latter.
  pattern = re.compile(r'"(.*?)".*?(MAP_\w*)')
  try:
    return NonRewritableDict(
      (match.group(2), match.group(1))
      for match in (pattern.search(line) for line in hotspots(fd))
      if match)
  finally:
    fd.close()  # fix: the original never closed the file handle

def main(input_filepath, output):
  """Transform the SQL source at INPUT_FILEPATH into a C header written
  to the file-like object OUTPUT.

  The token map is read from 'token-map.h' next to the input file, and
  the generated macros are named after the input file's basename.
  """
  filename = os.path.basename(input_filepath)
  # fix: close the input file deterministically (was a leaked handle)
  # and avoid shadowing the `input` builtin.
  with open(input_filepath, 'r') as f:
    sql_text = f.read()

  token_map_filename = os.path.dirname(input_filepath) + '/token-map.h'
  token_map = extract_token_map(token_map_filename)

  # e.g. 'wc-metadata.sql' -> 'WC_METADATA_SQL', a valid C identifier stem.
  var_name = re.sub('[-.]', '_', filename).upper()

  output.write(
    '/* This file is automatically generated from %s and %s.\n'
    ' * Do not edit this file -- edit the source and rerun gen-make.py */\n'
    '\n'
    % (filename, token_map_filename))

  proc = Processor(os.path.dirname(input_filepath), output, var_name, token_map)
  proc.process_file(sql_text)

  ### the STMT_%d naming precludes *multiple* transform_sql headers from
  ### being used within the same .c file. for now, that's more than fine.
  ### in the future, we can always add a var_name discriminator or use
  ### the statement name itself (which should hopefully be unique across
  ### all names in use; or can easily be made so)
  if proc.stmt_count > 0:
    output.write(
      '#define %s_DECLARE_STATEMENTS(varname) \\\n' % (var_name,)
      + '  static const char * const varname[] = { \\\n'
      + ', \\\n'.join('    STMT_%d' % (i,) for i in range(proc.stmt_count))
      + ', \\\n    NULL \\\n  }\n')

    output.write('\n')

    output.write(
      '#define %s_DECLARE_STATEMENT_INFO(varname) \\\n' % (var_name,)
      + '  static const char * const varname[][2] = { \\\n'
      + ', \\\n'.join('    STMT_%d_INFO' % (i) for i in range(proc.stmt_count))
      + ', \\\n    {NULL, NULL} \\\n  }\n')

if __name__ == '__main__':
  # Usage: transform_sql.py INPUT [OUTPUT]
  if len(sys.argv) not in (2, 3):
    usage_and_exit('Incorrect number of arguments')

  # Note: we could use stdin, but then we'd have no var_name
  input_filepath = sys.argv[1]

  # The output file is optional; fall back to stdout when omitted.
  output_file = open(sys.argv[2], 'w') if len(sys.argv) == 3 else sys.stdout

  main(input_filepath, output_file)



( run in 0.919 second using v1.01-cache-2.11-cpan-411bb0df24b )