[submodule "mod_auth_kerb"]
path = mod_auth_kerb
- url = ../mod_auth_kerb.git
+ url = http://www.project-moonshot.org/git/mod_auth_kerb.git
[submodule "radsecproxy"]
path = libradsec
url = git://git.nordu.net/radsecproxy.git
[submodule "shibboleth/opensaml2"]
path = shibboleth/opensaml2
- url = ../shibboleth/opensaml2.git
+ url = http://www.project-moonshot.org/git/shibboleth/opensaml2.git
[submodule "shibboleth/sp"]
path = shibboleth/sp
- url = ../shibboleth/sp.git
+ url = http://www.project-moonshot.org/git/shibboleth/sp.git
[submodule "shibboleth/xmltooling"]
path = shibboleth/xmltooling
- url = ../shibboleth/xmltooling.git
+ url = http://www.project-moonshot.org/git/shibboleth/xmltooling.git
[submodule "cyrus-sasl"]
path = cyrus-sasl
- url = ../cyrus-sasl
+ url = http://www.project-moonshot.org/git/cyrus-sasl
[submodule "shibboleth/resolver"]
path = shibboleth/resolver
- url = ../shibboleth/resolver.git
+ url = http://www.project-moonshot.org/git/shibboleth/resolver.git
[submodule "libeap"]
path = libeap
- url = ../libeap.git
+ url = http://www.project-moonshot.org/git/libeap.git
+[submodule "freeradius"]
+ path = freeradius
+ url = http://www.project-moonshot.org/git/freeradius.git
--- /dev/null
+#! /usr/bin/python
+
+# This script expects one line for each new revision in the form
+# <oldrev> <newrev> <refname>
+#
+# For example:
+# aa453216d1b3e49e7f6f98441fa56946ddcd6a20
+# 68f7abf4e6f922807889f52bc043ecd31b79f814 refs/heads/master
+#
+# Each of these changes will be passed to the buildbot server along
+# with any other change information we manage to extract from the
+# repository.
+#
+# This script is meant to be run from hooks/post-receive in the git
+# repository. It can also be run on the client side with hooks/post-merge
+# after using this wrapper:
+
+#!/bin/sh
+# PRE=$(git rev-parse 'HEAD@{1}')
+# POST=$(git rev-parse HEAD)
+# SYMNAME=$(git rev-parse --symbolic-full-name HEAD)
+# echo "$PRE $POST $SYMNAME" | git_buildbot.py
+#
+# Largely based on contrib/hooks/post-receive-email from git.
+
+import commands
+import logging
+import os
+import exceptions
+import re
+import sys
+
+from twisted.spread import pb
+from twisted.cred import credentials
+from twisted.internet import reactor
+
+from buildbot.scripts import runner
+from optparse import OptionParser
+
+# Modify this to fit your setup, or pass in --master server:host on the
+# command line
+
+master = "localhost:9989"
+
+# When sending the notification, send this category iff
+# it's set (via --category)
+
+category = None
+
+
+# The GIT_DIR environment variable must have been set up so that any
+# git commands that are executed will operate on the repository we're
+# installed in.
+
+changes = []
+
+
+def connectFailed(error):
+ logging.error("Could not connect to %s: %s"
+ % (master, error.getErrorMessage()))
+ return error
+
+
+def addChange(remote, changei):
+ logging.debug("addChange %s, %s" % (repr(remote), repr(changei)))
+ try:
+ c = changei.next()
+ except StopIteration:
+ remote.broker.transport.loseConnection()
+ return None
+
+ logging.info("New revision: %s" % c['revision'][:8])
+ for key, value in c.iteritems():
+ logging.debug(" %s: %s" % (key, value))
+
+ d = remote.callRemote('addChange', c)
+
+ # tail recursion in Twisted can blow out the stack, so we
+ # insert a callLater to delay things
+ def recurseLater(x):
+ reactor.callLater(0, addChange, remote, changei)
+ d.addCallback(recurseLater)
+ return d
+
+
+def connected(remote):
+ return addChange(remote, changes.__iter__())
+
+def add_package(c, files, rev):
+ packages_str = commands.getoutput("git show %s:source_packages" % rev)
+ packages = packages_str.split("\n")
+ packages_found = {}
+ class NoPackage(exceptions.Exception): pass
+ try:
+ for f in files:
+ found = False
+ for p in packages:
+ if f.startswith(p):
+ packages_found[p] = True
+ found = True
+ break
+ if not found: raise NoPackage()
+ except NoPackage:
+ return
+ c["properties"] = {
+ "package": " ".join(packages_found.keys())
+ }
+
+
+def grab_commit_info(c, rev):
+ # Extract information about committer and files using git show
+ f = os.popen("git show --raw --pretty=full %s" % rev, 'r')
+
+ files = []
+
+ while True:
+ line = f.readline()
+ if not line:
+ break
+
+ m = re.match(r"^:.*[MAD]\s+(.+)$", line)
+ if m:
+ logging.debug("Got file: %s" % m.group(1))
+ files.append(m.group(1))
+ continue
+
+ m = re.match(r"^Author:\s+(.+)$", line)
+ if m:
+ logging.debug("Got author: %s" % m.group(1))
+ c['who'] = m.group(1)
+
+ if re.match(r"^Merge: .*$", line):
+ files.append('merge')
+
+ c['files'] = files
+ status = f.close()
+ if status:
+ logging.warning("git show exited with status %d" % status)
+ add_package(c, files, rev)
+
+def gen_changes(input, branch):
+ while True:
+ line = input.readline()
+ if not line:
+ break
+
+ logging.debug("Change: %s" % line)
+
+ m = re.match(r"^([0-9a-f]+) (.*)$", line.strip())
+ c = {'revision': m.group(1),
+ 'comments': m.group(2),
+ 'branch': branch,
+ }
+ if category:
+ c['category'] = category
+ grab_commit_info(c, m.group(1))
+ changes.append(c)
+
+
+def gen_create_branch_changes(newrev, refname, branch):
+ # A new branch has been created. Generate changes for everything
+ # up to `newrev' which does not exist in any branch but `refname'.
+ #
+ # Note that this may be inaccurate if two new branches are created
+ # at the same time, pointing to the same commit, or if there are
+# commits that only exist in a common subset of the new branches.
+
+ logging.info("Branch `%s' created" % branch)
+
+ f = os.popen("git rev-parse --not --branches"
+ + "| grep -v $(git rev-parse %s)" % refname
+ + "| git rev-list --reverse --pretty=oneline --stdin %s" % newrev,
+ 'r')
+
+ gen_changes(f, branch)
+
+ status = f.close()
+ if status:
+ logging.warning("git rev-list exited with status %d" % status)
+
+
+def gen_update_branch_changes(oldrev, newrev, refname, branch):
+ # A branch has been updated. If it was a fast-forward update,
+ # generate Change events for everything between oldrev and newrev.
+ #
+ # In case of a forced update, first generate a "fake" Change event
+ # rewinding the branch to the common ancestor of oldrev and
+ # newrev. Then, generate Change events for each commit between the
+ # common ancestor and newrev.
+
+ logging.info("Branch `%s' updated %s .. %s"
+ % (branch, oldrev[:8], newrev[:8]))
+
+ baserev = commands.getoutput("git merge-base %s %s" % (oldrev, newrev))
+ logging.debug("oldrev=%s newrev=%s baserev=%s" % (oldrev, newrev, baserev))
+ if baserev != oldrev:
+ c = {'revision': baserev,
+ 'comments': "Rewind branch",
+ 'branch': branch,
+ 'who': "dummy",
+ }
+ logging.info("Branch %s was rewound to %s" % (branch, baserev[:8]))
+ files = []
+ f = os.popen("git diff --raw %s..%s" % (oldrev, baserev), 'r')
+ while True:
+ line = f.readline()
+ if not line:
+ break
+
+ file = re.match(r"^:.*[MAD]\s*(.+)$", line).group(1)
+ logging.debug(" Rewound file: %s" % file)
+ files.append(file)
+
+ status = f.close()
+ if status:
+ logging.warning("git diff exited with status %d" % status)
+
+ if category:
+ c['category'] = category
+
+ if files:
+ c['files'] = files
+ changes.append(c)
+
+ if newrev != baserev:
+ # Not a pure rewind
+ f = os.popen("git rev-list --reverse --pretty=oneline %s..%s"
+ % (baserev, newrev), 'r')
+ gen_changes(f, branch)
+
+ status = f.close()
+ if status:
+ logging.warning("git rev-list exited with status %d" % status)
+
+
+def cleanup(res):
+ reactor.stop()
+
+
+def process_changes():
+ # Read branch updates from stdin and generate Change events
+ while True:
+ line = sys.stdin.readline()
+ if not line:
+ break
+
+ [oldrev, newrev, refname] = line.split(None, 2)
+
+ # We only care about regular heads, i.e. branches
+ m = re.match(r"^refs\/heads\/(.+)$", refname)
+ if not m:
+ logging.info("Ignoring refname `%s': Not a branch" % refname)
+ continue
+
+ branch = m.group(1)
+
+ # Find out if the branch was created, deleted or updated. Branches
+ # being deleted aren't really interesting.
+ if re.match(r"^0*$", newrev):
+ logging.info("Branch `%s' deleted, ignoring" % branch)
+ continue
+ elif re.match(r"^0*$", oldrev):
+ gen_create_branch_changes(newrev, refname, branch)
+ else:
+ gen_update_branch_changes(oldrev, newrev, refname, branch)
+
+ # Submit the changes, if any
+ if not changes:
+ logging.warning("No changes found")
+ return
+
+ host, port = master.split(':')
+ port = int(port)
+
+ f = pb.PBClientFactory()
+ d = f.login(credentials.UsernamePassword("change", "changepw"))
+ reactor.connectTCP(host, port, f)
+
+ d.addErrback(connectFailed)
+ d.addCallback(connected)
+ d.addBoth(cleanup)
+
+ reactor.run()
+
+
+def parse_options():
+ parser = OptionParser()
+ parser.add_option("-l", "--logfile", action="store", type="string",
+ help="Log to the specified file")
+ parser.add_option("-v", "--verbose", action="count",
+ help="Be more verbose. Ignored if -l is not specified.")
+ master_help = ("Build master to push to. Default is %(master)s" %
+ { 'master' : master })
+ parser.add_option("-m", "--master", action="store", type="string",
+ help=master_help)
+ parser.add_option("-c", "--category", action="store",
+ type="string", help="Scheduler category to notify.")
+ options, args = parser.parse_args()
+ return options
+
+
+# Log errors and critical messages to stderr. Optionally log
+# information to a file as well (we'll set that up later.)
+stderr = logging.StreamHandler(sys.stderr)
+fmt = logging.Formatter("git_buildbot: %(levelname)s: %(message)s")
+stderr.setLevel(logging.ERROR)
+stderr.setFormatter(fmt)
+logging.getLogger().addHandler(stderr)
+logging.getLogger().setLevel(logging.DEBUG)
+
+try:
+ options = parse_options()
+ level = logging.WARNING
+ if options.verbose:
+ level -= 10 * options.verbose
+ if level < 0:
+ level = 0
+
+ if options.logfile:
+ logfile = logging.FileHandler(options.logfile)
+ logfile.setLevel(level)
+ fmt = logging.Formatter("%(asctime)s %(levelname)s: %(message)s")
+ logfile.setFormatter(fmt)
+ logging.getLogger().addHandler(logfile)
+
+ if options.master:
+ master=options.master
+
+ if options.category:
+ category = options.category
+
+ process_changes()
+except SystemExit:
+ pass
+except:
+ logging.exception("Unhandled exception")
+ sys.exit(1)
--- /dev/null
+#!/usr/bin/python
+
+'''A script to build Moonshot, optionally using a schroot for install testing.
+'''
+
+from contextlib import contextmanager
+import os, subprocess, exceptions
+import re
+import sys
+from optparse import OptionParser
+from shutil import copy
+
+
+
+# These variables can be overridden by options. If packages is not
+# set, then it is read from the source_packages file
+packages = [] # Set of packages to build
+prefix = "/usr/local/moonshot"
+root_command = "fakeroot"
+
+schroot_command = ""
+
+class CommandError(exceptions.StandardError):
+ pass
+
+class Schroot(object):
+ '''Represents a schroot used for building moonshot.'''
+
+ def __init__(self, name):
+        '''Initialize a schroot object from the named schroot.
+        If the name does not start with 'session:', a new session
+        schroot is created (and ended when this object is deleted).'''
+ if not name.startswith('session:'):
+ self.name = command_output(('schroot', '-b',
+ '-c', name))
+ self.end_session = True
+ else:
+ self.name = name
+ self.end_session = False
+
+ def __del__(self):
+ if self.end_session:
+ try:
+ run_cmd(('schroot', '-e', '-c', self.name))
+ except CommandError: pass
+
+@contextmanager
+def current_directory(dir):
+ "Change the current directory as a context manager; when the context exits, return."
+ cwd = os.getcwd()
+ os.chdir(dir)
+ yield
+ os.chdir(cwd)
+
+
+def run_cmd(args, **kwords):
+ rcode = subprocess.call( args, **kwords)
+ if rcode <> 0:
+ raise CommandError(args)
+
+def command_output(args) :
+ p = subprocess.Popen(args, stdout=subprocess.PIPE)
+ output = p.communicate()
+ output = output[0]
+ if p.returncode != 0:
+ raise CommandError(args)
+ return output.strip()
+
+def build(package):
+ with current_directory(package):
+ run_cmd(('autoreconf', '-i', '-f'))
+ configure_command = ' '.join([
+ './configure'] + configure_opts)
+ if len(schroot_command) > 0:
+ configure_command = schroot_command + " -- " \
+ + configure_command
+ print configure_command
+ sys.stdout.flush()
+ run_cmd(configure_command, shell=True)
+ run_cmd(schroot_command + ' make', shell=True)
+
+def make_install(package):
+ with current_directory(package):
+ install_command = root_command + " make install"
+ print install_command
+ sys.stdout.flush()
+ run_cmd(install_command, shell=True)
+
+
+
+def read_packages():
+ '''Read in the packages file from source_packages
+ '''
+ try: pf = file("source_packages")
+ except IOError:
+ print "Error: source_packages file not found"
+ exit(1)
+ def is_comment(line):
+ if re.match("^\\s*#", line): return False
+ if "#" in line: raise ValueError(
+ "Source package line contains a comment but not at beginning")
+ return True
+ return map(lambda(x): x.rstrip(),
+ filter(is_comment, pf.readlines()))
+
+
+# main program
+opt = OptionParser()
+opt.add_option('--prefix',
+ dest="prefix", default=prefix,
+ help="Set the prefix under which packages are built and"
+               + " installed")
+opt.add_option('-c', '--configure-opt', dest="configure_opts",
+ action="append",
+ help="Specify an option to pass to configure")
+opt.add_option('-r', '--root-cmd', dest="root_command",
+ default=root_command,
+ help="Specify command to gain root for make install")
+opt.add_option('-s', '--schroot',
+ dest="schroot",
+ help="Specify name of schroot to use for build;"
+               " implicitly sets root_command")
+opt.usage = "%prog [options] [packages]"
+(options, packages) = opt.parse_args()
+prefix = options.prefix
+root_command = options.root_command
+configure_opts = ['--prefix', prefix,
+ "LDFLAGS='-Wl,-L"+prefix+"/lib"
+ + " -Wl,-R"+prefix+"/lib'",
+ 'CPPFLAGS="-I '+prefix+'/include"',
+ '--with-system-libtool', '--with-system-libltdl',
+ ]
+if options.configure_opts is not None:
+ configure_opts.extend(options.configure_opts)
+
+our_schroot = None
+if options.schroot is not None:
+ our_schroot = Schroot(options.schroot)
+ schroot_command = "schroot -r -c " + our_schroot.name
+ root_command = schroot_command + " -u root"
+
+all_packages = read_packages()
+if len(packages) == 0: packages = all_packages
+
+os.umask(022)
+
+try:
+ for p in all_packages:
+ if p in packages: build(p)
+ make_install(p)
+except CommandError as c:
+ print "Error:" + str(c.args)
+ our_schroot = None
+ exit(1)
+finally: del our_schroot
+
+
+
-Subproject commit 2d5a0004454cee27bd0e4842404fd5d0f3e23ef2
+Subproject commit 755d9d20aadc1a140d01583b586f5a6d29c3a670
--- /dev/null
+Subproject commit ff3e93f74a06d0bfbd662058123864cc2f96df40
-Subproject commit 88adb3a99af7ac04c5f07174acdd54fbf13fee01
+Subproject commit b07f9ade9bad2ec38d4dc9f6592f64dff2e1b120
gss_internal_release_oid(OM_uint32 *minor,
gss_OID *oid)
{
- gss_OID internalizedOid = GSS_C_NO_OID;
-
- *minor = 0;
-
- if (gssEapInternalizeOid(*oid, &internalizedOid)) {
- /* OID was internalized, so we can mark it as "freed" */
- *oid = GSS_C_NO_OID;
- return GSS_S_COMPLETE;
- }
-
- /* we don't know about this OID */
- return GSS_S_CONTINUE_NEEDED;
+ return gssEapReleaseOid(minor, oid);
}
gss_OID *const pInternalizedOid);
OM_uint32
+gssEapReleaseOid(OM_uint32 *minor, gss_OID *oid);
+
+OM_uint32
gssEapDefaultMech(OM_uint32 *minor,
gss_OID *oid);
krb5_free_keyblock_contents(krbContext, &ctx->rfc3961Key);
gssEapReleaseName(&tmpMinor, &ctx->initiatorName);
gssEapReleaseName(&tmpMinor, &ctx->acceptorName);
- gss_release_oid(&tmpMinor, &ctx->mechanismUsed);
+ gssEapReleaseOid(&tmpMinor, &ctx->mechanismUsed);
sequenceFree(&tmpMinor, &ctx->seqState);
gssEapReleaseCred(&tmpMinor, &ctx->defaultCred);
KrbCredInfo krbCredInfo;
krb5_keyblock *key;
krb5_crypto krbCrypto = NULL;
- unsigned char *buf = NULL;
- size_t buf_size, len;
+ krb5_data credInfoData = { 0 };
+ size_t len;
#else
krb5_data *d = NULL;
#endif
krbCredInfo.sname = &creds->server->name;
krbCredInfo.caddr = creds->addresses.len ? &creds->addresses : NULL;
- ASN1_MALLOC_ENCODE(KrbCredInfo, buf, buf_size, &krbCredInfo, &len, code);
+ ASN1_MALLOC_ENCODE(KrbCredInfo, credInfoData.data, credInfoData.length,
+ &krbCredInfo, &len, code);
if (code != 0)
goto cleanup;
code = krb5_encrypt_EncryptedData(krbContext,
krbCrypto,
KRB5_KU_KRB_CRED,
- buf,
- len,
+ credInfoData.data,
+ credInfoData.length,
0,
&krbCred.enc_part);
if (code != 0)
goto cleanup;
- GSSEAP_FREE(buf);
- buf = NULL;
-
- ASN1_MALLOC_ENCODE(KRB_CRED, buf, buf_size, &krbCred, &len, code);
+ ASN1_MALLOC_ENCODE(KRB_CRED, data->data, data->length,
+ &krbCred, &len, code);
if (code != 0)
goto cleanup;
cleanup:
- if (buf != NULL)
- GSSEAP_FREE(buf);
if (krbCrypto != NULL)
krb5_crypto_destroy(krbContext, krbCrypto);
free_KRB_CRED(&krbCred);
+ krb5_data_free(&credInfoData);
return code;
#else
return 1;
}
+OM_uint32
+gssEapReleaseOid(OM_uint32 *minor, gss_OID *oid)
+{
+ gss_OID internalizedOid = GSS_C_NO_OID;
+
+ *minor = 0;
+
+ if (gssEapInternalizeOid(*oid, &internalizedOid)) {
+ /* OID was internalized, so we can mark it as "freed" */
+ *oid = GSS_C_NO_OID;
+ return GSS_S_COMPLETE;
+ }
+
+ /* we don't know about this OID */
+ return GSS_S_CONTINUE_NEEDED;
+}
+
static gss_buffer_desc gssEapSaslMechs[] = {
{ sizeof("EAP") - 1, "EAP", }, /* not used */
{ sizeof("EAP-AES128") - 1, "EAP-AES128" },
EncTicketPart enc_part;
AuthorizationData authData = { 0 };
krb5_crypto krbCrypto = NULL;
- unsigned char *buf = NULL;
- size_t buf_size, len;
+ krb5_data ticketData = { 0 };
+ krb5_data encPartData = { 0 };
+ size_t len;
#else
krb5_ticket ticket;
krb5_enc_tkt_part enc_part;
+ krb5_data *ticketData = NULL;
#endif
- krb5_data *ticketData = NULL, credsData = { 0 };
+ krb5_data credsData = { 0 };
krb5_creds creds = { 0 };
krb5_auth_context authContext = NULL;
* Generate a random session key to place in the ticket and
* sign the "KDC-Issued" authorization data element.
*/
- code = krb5_c_make_random_key(krbContext, ctx->encryptionType,
- &session);
+#ifdef HAVE_HEIMDAL_VERSION
+ code = krb5_generate_random_keyblock(krbContext, ctx->encryptionType,
+ &session);
if (code != 0)
goto cleanup;
-#ifdef HAVE_HEIMDAL_VERSION
enc_part.flags.initial = 1;
enc_part.key = session;
enc_part.crealm = ctx->initiatorName->krbPrincipal->realm;
if (GSS_ERROR(major))
goto cleanup;
- ASN1_MALLOC_ENCODE(EncTicketPart, buf, buf_size, &enc_part, &len, code);
+ ASN1_MALLOC_ENCODE(EncTicketPart, encPartData.data, encPartData.length,
+ &enc_part, &len, code);
if (code != 0)
goto cleanup;
code = krb5_encrypt_EncryptedData(krbContext,
krbCrypto,
KRB5_KU_TICKET,
- buf,
- len,
+ encPartData.data,
+ encPartData.length,
0,
&ticket.enc_part);
if (code != 0)
goto cleanup;
- GSSEAP_FREE(buf);
- buf = NULL;
-
- ASN1_MALLOC_ENCODE(Ticket, buf, buf_size, &ticket, &len, code);
+ ASN1_MALLOC_ENCODE(Ticket, ticketData.data, ticketData.length,
+ &ticket, &len, code);
if (code != 0)
goto cleanup;
#else
+ code = krb5_c_make_random_key(krbContext, ctx->encryptionType,
+ &session);
+ if (code != 0)
+ goto cleanup;
+
enc_part.flags = TKT_FLG_INITIAL;
enc_part.session = &session;
enc_part.client = ctx->initiatorName->krbPrincipal;
creds.times.endtime = enc_part.endtime;
creds.times.renew_till = 0;
creds.flags.b = enc_part.flags;
- creds.ticket = *ticketData;
+ creds.ticket = ticketData;
creds.authdata = authData;
#else
creds.keyblock = session;
#ifdef HAVE_HEIMDAL_VERSION
if (krbCrypto != NULL)
krb5_crypto_destroy(krbContext, krbCrypto);
- if (buf != NULL)
- GSSEAP_FREE(buf);
free_AuthorizationData(&authData);
free_EncryptedData(&ticket.enc_part);
+ krb5_data_free(&ticketData);
+ krb5_data_free(&encPartData);
#else
krb5_free_authdata(krbContext, enc_part.authorization_data);
if (ticket.enc_part.ciphertext.data != NULL)
GSSEAP_FREE(ticket.enc_part.ciphertext.data);
+ krb5_free_data(krbContext, ticketData);
#endif
krb5_free_keyblock_contents(krbContext, &session);
krb5_free_principal(krbContext, server);
krb5_free_keyblock_contents(krbContext, &acceptorKey);
- krb5_free_data(krbContext, ticketData);
krb5_auth_con_free(krbContext, authContext);
if (major == GSS_S_COMPLETE) {
NEXT_SYMBOL(gssDisplayNameNext, "gss_display_name");
NEXT_SYMBOL(gssImportNameNext, "gss_import_name");
NEXT_SYMBOL(gssStoreCredNext, "gss_store_cred");
+#ifndef HAVE_HEIMDAL_VERSION
NEXT_SYMBOL(gssGetNameAttributeNext, "gss_get_name_attribute");
+#endif
return major;
}
--- /dev/null
+# This file contains a list of directories containing
+# Debian source packages, one per line.
+# Lines starting with # are comments
+shibboleth/xmltooling
+shibboleth/opensaml2
+shibboleth/sp
+shibboleth/resolver
+freeradius