mirror of
https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00
Merge branch 'master' into bayesian
This commit is contained in:
25
test/fixtures/c/blob.c
vendored
Normal file
25
test/fixtures/c/blob.c
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
#include "cache.h"
|
||||
#include "blob.h"
|
||||
|
||||
const char *blob_type = "blob";
|
||||
|
||||
struct blob *lookup_blob(const unsigned char *sha1)
|
||||
{
|
||||
struct object *obj = lookup_object(sha1);
|
||||
if (!obj)
|
||||
return create_object(sha1, OBJ_BLOB, alloc_blob_node());
|
||||
if (!obj->type)
|
||||
obj->type = OBJ_BLOB;
|
||||
if (obj->type != OBJ_BLOB) {
|
||||
error("Object %s is a %s, not a blob",
|
||||
sha1_to_hex(sha1), typename(obj->type));
|
||||
return NULL;
|
||||
}
|
||||
return (struct blob *) obj;
|
||||
}
|
||||
|
||||
int parse_blob_buffer(struct blob *item, void *buffer, unsigned long size)
|
||||
{
|
||||
item->object.parsed = 1;
|
||||
return 0;
|
||||
}
|
||||
99
test/fixtures/c/cache.c
vendored
Normal file
99
test/fixtures/c/cache.c
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
/*
|
||||
* Copyright (C) 2009-2012 the libgit2 contributors
|
||||
*
|
||||
* This file is part of libgit2, distributed under the GNU GPL v2 with
|
||||
* a Linking Exception. For full terms see the included COPYING file.
|
||||
*/
|
||||
|
||||
#include "common.h"
|
||||
#include "repository.h"
|
||||
#include "commit.h"
|
||||
#include "thread-utils.h"
|
||||
#include "util.h"
|
||||
#include "cache.h"
|
||||
|
||||
int git_cache_init(git_cache *cache, size_t size, git_cached_obj_freeptr free_ptr)
|
||||
{
|
||||
if (size < 8)
|
||||
size = 8;
|
||||
size = git__size_t_powerof2(size);
|
||||
|
||||
cache->size_mask = size - 1;
|
||||
cache->lru_count = 0;
|
||||
cache->free_obj = free_ptr;
|
||||
|
||||
git_mutex_init(&cache->lock);
|
||||
|
||||
cache->nodes = git__malloc(size * sizeof(git_cached_obj *));
|
||||
GITERR_CHECK_ALLOC(cache->nodes);
|
||||
|
||||
memset(cache->nodes, 0x0, size * sizeof(git_cached_obj *));
|
||||
return 0;
|
||||
}
|
||||
|
||||
void git_cache_free(git_cache *cache)
|
||||
{
|
||||
size_t i;
|
||||
|
||||
for (i = 0; i < (cache->size_mask + 1); ++i) {
|
||||
if (cache->nodes[i] != NULL)
|
||||
git_cached_obj_decref(cache->nodes[i], cache->free_obj);
|
||||
}
|
||||
|
||||
git__free(cache->nodes);
|
||||
}
|
||||
|
||||
void *git_cache_get(git_cache *cache, const git_oid *oid)
|
||||
{
|
||||
uint32_t hash;
|
||||
git_cached_obj *node = NULL, *result = NULL;
|
||||
|
||||
memcpy(&hash, oid->id, sizeof(hash));
|
||||
|
||||
git_mutex_lock(&cache->lock);
|
||||
{
|
||||
node = cache->nodes[hash & cache->size_mask];
|
||||
|
||||
if (node != NULL && git_oid_cmp(&node->oid, oid) == 0) {
|
||||
git_cached_obj_incref(node);
|
||||
result = node;
|
||||
}
|
||||
}
|
||||
git_mutex_unlock(&cache->lock);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
void *git_cache_try_store(git_cache *cache, void *_entry)
|
||||
{
|
||||
git_cached_obj *entry = _entry;
|
||||
uint32_t hash;
|
||||
|
||||
memcpy(&hash, &entry->oid, sizeof(uint32_t));
|
||||
|
||||
/* increase the refcount on this object, because
|
||||
* the cache now owns it */
|
||||
git_cached_obj_incref(entry);
|
||||
|
||||
git_mutex_lock(&cache->lock);
|
||||
{
|
||||
git_cached_obj *node = cache->nodes[hash & cache->size_mask];
|
||||
|
||||
if (node == NULL) {
|
||||
cache->nodes[hash & cache->size_mask] = entry;
|
||||
} else if (git_oid_cmp(&node->oid, &entry->oid) == 0) {
|
||||
git_cached_obj_decref(entry, cache->free_obj);
|
||||
entry = node;
|
||||
} else {
|
||||
git_cached_obj_decref(node, cache->free_obj);
|
||||
cache->nodes[hash & cache->size_mask] = entry;
|
||||
}
|
||||
}
|
||||
git_mutex_unlock(&cache->lock);
|
||||
|
||||
/* increase the refcount again, because we are
|
||||
* returning it to the user */
|
||||
git_cached_obj_incref(entry);
|
||||
|
||||
return entry;
|
||||
}
|
||||
1228
test/fixtures/c/commit.c
vendored
Normal file
1228
test/fixtures/c/commit.c
vendored
Normal file
File diff suppressed because it is too large
Load Diff
725
test/fixtures/c/cpu.c
vendored
Normal file
725
test/fixtures/c/cpu.c
vendored
Normal file
@@ -0,0 +1,725 @@
|
||||
/* CPU control.
|
||||
* (C) 2001, 2002, 2003, 2004 Rusty Russell
|
||||
*
|
||||
* This code is licenced under the GPL.
|
||||
*/
|
||||
#include <linux/proc_fs.h>
|
||||
#include <linux/smp.h>
|
||||
#include <linux/init.h>
|
||||
#include <linux/notifier.h>
|
||||
#include <linux/sched.h>
|
||||
#include <linux/unistd.h>
|
||||
#include <linux/cpu.h>
|
||||
#include <linux/oom.h>
|
||||
#include <linux/rcupdate.h>
|
||||
#include <linux/export.h>
|
||||
#include <linux/bug.h>
|
||||
#include <linux/kthread.h>
|
||||
#include <linux/stop_machine.h>
|
||||
#include <linux/mutex.h>
|
||||
#include <linux/gfp.h>
|
||||
#include <linux/suspend.h>
|
||||
|
||||
#include "smpboot.h"
|
||||
|
||||
#ifdef CONFIG_SMP
|
||||
/* Serializes the updates to cpu_online_mask, cpu_present_mask */
|
||||
static DEFINE_MUTEX(cpu_add_remove_lock);
|
||||
|
||||
/*
|
||||
* The following two API's must be used when attempting
|
||||
* to serialize the updates to cpu_online_mask, cpu_present_mask.
|
||||
*/
|
||||
void cpu_maps_update_begin(void)
|
||||
{
|
||||
mutex_lock(&cpu_add_remove_lock);
|
||||
}
|
||||
|
||||
void cpu_maps_update_done(void)
|
||||
{
|
||||
mutex_unlock(&cpu_add_remove_lock);
|
||||
}
|
||||
|
||||
static RAW_NOTIFIER_HEAD(cpu_chain);
|
||||
|
||||
/* If set, cpu_up and cpu_down will return -EBUSY and do nothing.
|
||||
* Should always be manipulated under cpu_add_remove_lock
|
||||
*/
|
||||
static int cpu_hotplug_disabled;
|
||||
|
||||
#ifdef CONFIG_HOTPLUG_CPU
|
||||
|
||||
static struct {
|
||||
struct task_struct *active_writer;
|
||||
struct mutex lock; /* Synchronizes accesses to refcount, */
|
||||
/*
|
||||
* Also blocks the new readers during
|
||||
* an ongoing cpu hotplug operation.
|
||||
*/
|
||||
int refcount;
|
||||
} cpu_hotplug = {
|
||||
.active_writer = NULL,
|
||||
.lock = __MUTEX_INITIALIZER(cpu_hotplug.lock),
|
||||
.refcount = 0,
|
||||
};
|
||||
|
||||
void get_online_cpus(void)
|
||||
{
|
||||
might_sleep();
|
||||
if (cpu_hotplug.active_writer == current)
|
||||
return;
|
||||
mutex_lock(&cpu_hotplug.lock);
|
||||
cpu_hotplug.refcount++;
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
|
||||
}
|
||||
EXPORT_SYMBOL_GPL(get_online_cpus);
|
||||
|
||||
void put_online_cpus(void)
|
||||
{
|
||||
if (cpu_hotplug.active_writer == current)
|
||||
return;
|
||||
mutex_lock(&cpu_hotplug.lock);
|
||||
if (!--cpu_hotplug.refcount && unlikely(cpu_hotplug.active_writer))
|
||||
wake_up_process(cpu_hotplug.active_writer);
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
|
||||
}
|
||||
EXPORT_SYMBOL_GPL(put_online_cpus);
|
||||
|
||||
/*
|
||||
* This ensures that the hotplug operation can begin only when the
|
||||
* refcount goes to zero.
|
||||
*
|
||||
* Note that during a cpu-hotplug operation, the new readers, if any,
|
||||
* will be blocked by the cpu_hotplug.lock
|
||||
*
|
||||
* Since cpu_hotplug_begin() is always called after invoking
|
||||
* cpu_maps_update_begin(), we can be sure that only one writer is active.
|
||||
*
|
||||
* Note that theoretically, there is a possibility of a livelock:
|
||||
* - Refcount goes to zero, last reader wakes up the sleeping
|
||||
* writer.
|
||||
* - Last reader unlocks the cpu_hotplug.lock.
|
||||
* - A new reader arrives at this moment, bumps up the refcount.
|
||||
* - The writer acquires the cpu_hotplug.lock finds the refcount
|
||||
* non zero and goes to sleep again.
|
||||
*
|
||||
* However, this is very difficult to achieve in practice since
|
||||
* get_online_cpus() not an api which is called all that often.
|
||||
*
|
||||
*/
|
||||
static void cpu_hotplug_begin(void)
|
||||
{
|
||||
cpu_hotplug.active_writer = current;
|
||||
|
||||
for (;;) {
|
||||
mutex_lock(&cpu_hotplug.lock);
|
||||
if (likely(!cpu_hotplug.refcount))
|
||||
break;
|
||||
__set_current_state(TASK_UNINTERRUPTIBLE);
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
schedule();
|
||||
}
|
||||
}
|
||||
|
||||
static void cpu_hotplug_done(void)
|
||||
{
|
||||
cpu_hotplug.active_writer = NULL;
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
}
|
||||
|
||||
#else /* #if CONFIG_HOTPLUG_CPU */
|
||||
static void cpu_hotplug_begin(void) {}
|
||||
static void cpu_hotplug_done(void) {}
|
||||
#endif /* #else #if CONFIG_HOTPLUG_CPU */
|
||||
|
||||
/* Need to know about CPUs going up/down? */
|
||||
int __ref register_cpu_notifier(struct notifier_block *nb)
|
||||
{
|
||||
int ret;
|
||||
cpu_maps_update_begin();
|
||||
ret = raw_notifier_chain_register(&cpu_chain, nb);
|
||||
cpu_maps_update_done();
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int __cpu_notify(unsigned long val, void *v, int nr_to_call,
|
||||
int *nr_calls)
|
||||
{
|
||||
int ret;
|
||||
|
||||
ret = __raw_notifier_call_chain(&cpu_chain, val, v, nr_to_call,
|
||||
nr_calls);
|
||||
|
||||
return notifier_to_errno(ret);
|
||||
}
|
||||
|
||||
static int cpu_notify(unsigned long val, void *v)
|
||||
{
|
||||
return __cpu_notify(val, v, -1, NULL);
|
||||
}
|
||||
|
||||
#ifdef CONFIG_HOTPLUG_CPU
|
||||
|
||||
static void cpu_notify_nofail(unsigned long val, void *v)
|
||||
{
|
||||
BUG_ON(cpu_notify(val, v));
|
||||
}
|
||||
EXPORT_SYMBOL(register_cpu_notifier);
|
||||
|
||||
void __ref unregister_cpu_notifier(struct notifier_block *nb)
|
||||
{
|
||||
cpu_maps_update_begin();
|
||||
raw_notifier_chain_unregister(&cpu_chain, nb);
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
EXPORT_SYMBOL(unregister_cpu_notifier);
|
||||
|
||||
/**
|
||||
* clear_tasks_mm_cpumask - Safely clear tasks' mm_cpumask for a CPU
|
||||
* @cpu: a CPU id
|
||||
*
|
||||
* This function walks all processes, finds a valid mm struct for each one and
|
||||
* then clears a corresponding bit in mm's cpumask. While this all sounds
|
||||
* trivial, there are various non-obvious corner cases, which this function
|
||||
* tries to solve in a safe manner.
|
||||
*
|
||||
* Also note that the function uses a somewhat relaxed locking scheme, so it may
|
||||
* be called only for an already offlined CPU.
|
||||
*/
|
||||
void clear_tasks_mm_cpumask(int cpu)
|
||||
{
|
||||
struct task_struct *p;
|
||||
|
||||
/*
|
||||
* This function is called after the cpu is taken down and marked
|
||||
* offline, so its not like new tasks will ever get this cpu set in
|
||||
* their mm mask. -- Peter Zijlstra
|
||||
* Thus, we may use rcu_read_lock() here, instead of grabbing
|
||||
* full-fledged tasklist_lock.
|
||||
*/
|
||||
WARN_ON(cpu_online(cpu));
|
||||
rcu_read_lock();
|
||||
for_each_process(p) {
|
||||
struct task_struct *t;
|
||||
|
||||
/*
|
||||
* Main thread might exit, but other threads may still have
|
||||
* a valid mm. Find one.
|
||||
*/
|
||||
t = find_lock_task_mm(p);
|
||||
if (!t)
|
||||
continue;
|
||||
cpumask_clear_cpu(cpu, mm_cpumask(t->mm));
|
||||
task_unlock(t);
|
||||
}
|
||||
rcu_read_unlock();
|
||||
}
|
||||
|
||||
static inline void check_for_tasks(int cpu)
|
||||
{
|
||||
struct task_struct *p;
|
||||
|
||||
write_lock_irq(&tasklist_lock);
|
||||
for_each_process(p) {
|
||||
if (task_cpu(p) == cpu && p->state == TASK_RUNNING &&
|
||||
(p->utime || p->stime))
|
||||
printk(KERN_WARNING "Task %s (pid = %d) is on cpu %d "
|
||||
"(state = %ld, flags = %x)\n",
|
||||
p->comm, task_pid_nr(p), cpu,
|
||||
p->state, p->flags);
|
||||
}
|
||||
write_unlock_irq(&tasklist_lock);
|
||||
}
|
||||
|
||||
struct take_cpu_down_param {
|
||||
unsigned long mod;
|
||||
void *hcpu;
|
||||
};
|
||||
|
||||
/* Take this CPU down. */
|
||||
static int __ref take_cpu_down(void *_param)
|
||||
{
|
||||
struct take_cpu_down_param *param = _param;
|
||||
int err;
|
||||
|
||||
/* Ensure this CPU doesn't handle any more interrupts. */
|
||||
err = __cpu_disable();
|
||||
if (err < 0)
|
||||
return err;
|
||||
|
||||
cpu_notify(CPU_DYING | param->mod, param->hcpu);
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Requires cpu_add_remove_lock to be held */
|
||||
static int __ref _cpu_down(unsigned int cpu, int tasks_frozen)
|
||||
{
|
||||
int err, nr_calls = 0;
|
||||
void *hcpu = (void *)(long)cpu;
|
||||
unsigned long mod = tasks_frozen ? CPU_TASKS_FROZEN : 0;
|
||||
struct take_cpu_down_param tcd_param = {
|
||||
.mod = mod,
|
||||
.hcpu = hcpu,
|
||||
};
|
||||
|
||||
if (num_online_cpus() == 1)
|
||||
return -EBUSY;
|
||||
|
||||
if (!cpu_online(cpu))
|
||||
return -EINVAL;
|
||||
|
||||
cpu_hotplug_begin();
|
||||
|
||||
err = __cpu_notify(CPU_DOWN_PREPARE | mod, hcpu, -1, &nr_calls);
|
||||
if (err) {
|
||||
nr_calls--;
|
||||
__cpu_notify(CPU_DOWN_FAILED | mod, hcpu, nr_calls, NULL);
|
||||
printk("%s: attempt to take down CPU %u failed\n",
|
||||
__func__, cpu);
|
||||
goto out_release;
|
||||
}
|
||||
|
||||
err = __stop_machine(take_cpu_down, &tcd_param, cpumask_of(cpu));
|
||||
if (err) {
|
||||
/* CPU didn't die: tell everyone. Can't complain. */
|
||||
cpu_notify_nofail(CPU_DOWN_FAILED | mod, hcpu);
|
||||
|
||||
goto out_release;
|
||||
}
|
||||
BUG_ON(cpu_online(cpu));
|
||||
|
||||
/*
|
||||
* The migration_call() CPU_DYING callback will have removed all
|
||||
* runnable tasks from the cpu, there's only the idle task left now
|
||||
* that the migration thread is done doing the stop_machine thing.
|
||||
*
|
||||
* Wait for the stop thread to go away.
|
||||
*/
|
||||
while (!idle_cpu(cpu))
|
||||
cpu_relax();
|
||||
|
||||
/* This actually kills the CPU. */
|
||||
__cpu_die(cpu);
|
||||
|
||||
/* CPU is completely dead: tell everyone. Too late to complain. */
|
||||
cpu_notify_nofail(CPU_DEAD | mod, hcpu);
|
||||
|
||||
check_for_tasks(cpu);
|
||||
|
||||
out_release:
|
||||
cpu_hotplug_done();
|
||||
if (!err)
|
||||
cpu_notify_nofail(CPU_POST_DEAD | mod, hcpu);
|
||||
return err;
|
||||
}
|
||||
|
||||
int __ref cpu_down(unsigned int cpu)
|
||||
{
|
||||
int err;
|
||||
|
||||
cpu_maps_update_begin();
|
||||
|
||||
if (cpu_hotplug_disabled) {
|
||||
err = -EBUSY;
|
||||
goto out;
|
||||
}
|
||||
|
||||
err = _cpu_down(cpu, 0);
|
||||
|
||||
out:
|
||||
cpu_maps_update_done();
|
||||
return err;
|
||||
}
|
||||
EXPORT_SYMBOL(cpu_down);
|
||||
#endif /*CONFIG_HOTPLUG_CPU*/
|
||||
|
||||
/* Requires cpu_add_remove_lock to be held */
|
||||
static int __cpuinit _cpu_up(unsigned int cpu, int tasks_frozen)
|
||||
{
|
||||
int ret, nr_calls = 0;
|
||||
void *hcpu = (void *)(long)cpu;
|
||||
unsigned long mod = tasks_frozen ? CPU_TASKS_FROZEN : 0;
|
||||
struct task_struct *idle;
|
||||
|
||||
if (cpu_online(cpu) || !cpu_present(cpu))
|
||||
return -EINVAL;
|
||||
|
||||
cpu_hotplug_begin();
|
||||
|
||||
idle = idle_thread_get(cpu);
|
||||
if (IS_ERR(idle)) {
|
||||
ret = PTR_ERR(idle);
|
||||
goto out;
|
||||
}
|
||||
|
||||
ret = __cpu_notify(CPU_UP_PREPARE | mod, hcpu, -1, &nr_calls);
|
||||
if (ret) {
|
||||
nr_calls--;
|
||||
printk(KERN_WARNING "%s: attempt to bring up CPU %u failed\n",
|
||||
__func__, cpu);
|
||||
goto out_notify;
|
||||
}
|
||||
|
||||
/* Arch-specific enabling code. */
|
||||
ret = __cpu_up(cpu, idle);
|
||||
if (ret != 0)
|
||||
goto out_notify;
|
||||
BUG_ON(!cpu_online(cpu));
|
||||
|
||||
/* Now call notifier in preparation. */
|
||||
cpu_notify(CPU_ONLINE | mod, hcpu);
|
||||
|
||||
out_notify:
|
||||
if (ret != 0)
|
||||
__cpu_notify(CPU_UP_CANCELED | mod, hcpu, nr_calls, NULL);
|
||||
out:
|
||||
cpu_hotplug_done();
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int __cpuinit cpu_up(unsigned int cpu)
|
||||
{
|
||||
int err = 0;
|
||||
|
||||
#ifdef CONFIG_MEMORY_HOTPLUG
|
||||
int nid;
|
||||
pg_data_t *pgdat;
|
||||
#endif
|
||||
|
||||
if (!cpu_possible(cpu)) {
|
||||
printk(KERN_ERR "can't online cpu %d because it is not "
|
||||
"configured as may-hotadd at boot time\n", cpu);
|
||||
#if defined(CONFIG_IA64)
|
||||
printk(KERN_ERR "please check additional_cpus= boot "
|
||||
"parameter\n");
|
||||
#endif
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
#ifdef CONFIG_MEMORY_HOTPLUG
|
||||
nid = cpu_to_node(cpu);
|
||||
if (!node_online(nid)) {
|
||||
err = mem_online_node(nid);
|
||||
if (err)
|
||||
return err;
|
||||
}
|
||||
|
||||
pgdat = NODE_DATA(nid);
|
||||
if (!pgdat) {
|
||||
printk(KERN_ERR
|
||||
"Can't online cpu %d due to NULL pgdat\n", cpu);
|
||||
return -ENOMEM;
|
||||
}
|
||||
|
||||
if (pgdat->node_zonelists->_zonerefs->zone == NULL) {
|
||||
mutex_lock(&zonelists_mutex);
|
||||
build_all_zonelists(NULL);
|
||||
mutex_unlock(&zonelists_mutex);
|
||||
}
|
||||
#endif
|
||||
|
||||
cpu_maps_update_begin();
|
||||
|
||||
if (cpu_hotplug_disabled) {
|
||||
err = -EBUSY;
|
||||
goto out;
|
||||
}
|
||||
|
||||
err = _cpu_up(cpu, 0);
|
||||
|
||||
out:
|
||||
cpu_maps_update_done();
|
||||
return err;
|
||||
}
|
||||
EXPORT_SYMBOL_GPL(cpu_up);
|
||||
|
||||
#ifdef CONFIG_PM_SLEEP_SMP
|
||||
static cpumask_var_t frozen_cpus;
|
||||
|
||||
void __weak arch_disable_nonboot_cpus_begin(void)
|
||||
{
|
||||
}
|
||||
|
||||
void __weak arch_disable_nonboot_cpus_end(void)
|
||||
{
|
||||
}
|
||||
|
||||
int disable_nonboot_cpus(void)
|
||||
{
|
||||
int cpu, first_cpu, error = 0;
|
||||
|
||||
cpu_maps_update_begin();
|
||||
first_cpu = cpumask_first(cpu_online_mask);
|
||||
/*
|
||||
* We take down all of the non-boot CPUs in one shot to avoid races
|
||||
* with the userspace trying to use the CPU hotplug at the same time
|
||||
*/
|
||||
cpumask_clear(frozen_cpus);
|
||||
arch_disable_nonboot_cpus_begin();
|
||||
|
||||
printk("Disabling non-boot CPUs ...\n");
|
||||
for_each_online_cpu(cpu) {
|
||||
if (cpu == first_cpu)
|
||||
continue;
|
||||
error = _cpu_down(cpu, 1);
|
||||
if (!error)
|
||||
cpumask_set_cpu(cpu, frozen_cpus);
|
||||
else {
|
||||
printk(KERN_ERR "Error taking CPU%d down: %d\n",
|
||||
cpu, error);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
arch_disable_nonboot_cpus_end();
|
||||
|
||||
if (!error) {
|
||||
BUG_ON(num_online_cpus() > 1);
|
||||
/* Make sure the CPUs won't be enabled by someone else */
|
||||
cpu_hotplug_disabled = 1;
|
||||
} else {
|
||||
printk(KERN_ERR "Non-boot CPUs are not disabled\n");
|
||||
}
|
||||
cpu_maps_update_done();
|
||||
return error;
|
||||
}
|
||||
|
||||
void __weak arch_enable_nonboot_cpus_begin(void)
|
||||
{
|
||||
}
|
||||
|
||||
void __weak arch_enable_nonboot_cpus_end(void)
|
||||
{
|
||||
}
|
||||
|
||||
void __ref enable_nonboot_cpus(void)
|
||||
{
|
||||
int cpu, error;
|
||||
|
||||
/* Allow everyone to use the CPU hotplug again */
|
||||
cpu_maps_update_begin();
|
||||
cpu_hotplug_disabled = 0;
|
||||
if (cpumask_empty(frozen_cpus))
|
||||
goto out;
|
||||
|
||||
printk(KERN_INFO "Enabling non-boot CPUs ...\n");
|
||||
|
||||
arch_enable_nonboot_cpus_begin();
|
||||
|
||||
for_each_cpu(cpu, frozen_cpus) {
|
||||
error = _cpu_up(cpu, 1);
|
||||
if (!error) {
|
||||
printk(KERN_INFO "CPU%d is up\n", cpu);
|
||||
continue;
|
||||
}
|
||||
printk(KERN_WARNING "Error taking CPU%d up: %d\n", cpu, error);
|
||||
}
|
||||
|
||||
arch_enable_nonboot_cpus_end();
|
||||
|
||||
cpumask_clear(frozen_cpus);
|
||||
out:
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
|
||||
static int __init alloc_frozen_cpus(void)
|
||||
{
|
||||
if (!alloc_cpumask_var(&frozen_cpus, GFP_KERNEL|__GFP_ZERO))
|
||||
return -ENOMEM;
|
||||
return 0;
|
||||
}
|
||||
core_initcall(alloc_frozen_cpus);
|
||||
|
||||
/*
|
||||
* Prevent regular CPU hotplug from racing with the freezer, by disabling CPU
|
||||
* hotplug when tasks are about to be frozen. Also, don't allow the freezer
|
||||
* to continue until any currently running CPU hotplug operation gets
|
||||
* completed.
|
||||
* To modify the 'cpu_hotplug_disabled' flag, we need to acquire the
|
||||
* 'cpu_add_remove_lock'. And this same lock is also taken by the regular
|
||||
* CPU hotplug path and released only after it is complete. Thus, we
|
||||
* (and hence the freezer) will block here until any currently running CPU
|
||||
* hotplug operation gets completed.
|
||||
*/
|
||||
void cpu_hotplug_disable_before_freeze(void)
|
||||
{
|
||||
cpu_maps_update_begin();
|
||||
cpu_hotplug_disabled = 1;
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* When tasks have been thawed, re-enable regular CPU hotplug (which had been
|
||||
* disabled while beginning to freeze tasks).
|
||||
*/
|
||||
void cpu_hotplug_enable_after_thaw(void)
|
||||
{
|
||||
cpu_maps_update_begin();
|
||||
cpu_hotplug_disabled = 0;
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
|
||||
/*
|
||||
* When callbacks for CPU hotplug notifications are being executed, we must
|
||||
* ensure that the state of the system with respect to the tasks being frozen
|
||||
* or not, as reported by the notification, remains unchanged *throughout the
|
||||
* duration* of the execution of the callbacks.
|
||||
* Hence we need to prevent the freezer from racing with regular CPU hotplug.
|
||||
*
|
||||
* This synchronization is implemented by mutually excluding regular CPU
|
||||
* hotplug and Suspend/Hibernate call paths by hooking onto the Suspend/
|
||||
* Hibernate notifications.
|
||||
*/
|
||||
static int
|
||||
cpu_hotplug_pm_callback(struct notifier_block *nb,
|
||||
unsigned long action, void *ptr)
|
||||
{
|
||||
switch (action) {
|
||||
|
||||
case PM_SUSPEND_PREPARE:
|
||||
case PM_HIBERNATION_PREPARE:
|
||||
cpu_hotplug_disable_before_freeze();
|
||||
break;
|
||||
|
||||
case PM_POST_SUSPEND:
|
||||
case PM_POST_HIBERNATION:
|
||||
cpu_hotplug_enable_after_thaw();
|
||||
break;
|
||||
|
||||
default:
|
||||
return NOTIFY_DONE;
|
||||
}
|
||||
|
||||
return NOTIFY_OK;
|
||||
}
|
||||
|
||||
|
||||
static int __init cpu_hotplug_pm_sync_init(void)
|
||||
{
|
||||
pm_notifier(cpu_hotplug_pm_callback, 0);
|
||||
return 0;
|
||||
}
|
||||
core_initcall(cpu_hotplug_pm_sync_init);
|
||||
|
||||
#endif /* CONFIG_PM_SLEEP_SMP */
|
||||
|
||||
/**
|
||||
* notify_cpu_starting(cpu) - call the CPU_STARTING notifiers
|
||||
* @cpu: cpu that just started
|
||||
*
|
||||
* This function calls the cpu_chain notifiers with CPU_STARTING.
|
||||
* It must be called by the arch code on the new cpu, before the new cpu
|
||||
* enables interrupts and before the "boot" cpu returns from __cpu_up().
|
||||
*/
|
||||
void __cpuinit notify_cpu_starting(unsigned int cpu)
|
||||
{
|
||||
unsigned long val = CPU_STARTING;
|
||||
|
||||
#ifdef CONFIG_PM_SLEEP_SMP
|
||||
if (frozen_cpus != NULL && cpumask_test_cpu(cpu, frozen_cpus))
|
||||
val = CPU_STARTING_FROZEN;
|
||||
#endif /* CONFIG_PM_SLEEP_SMP */
|
||||
cpu_notify(val, (void *)(long)cpu);
|
||||
}
|
||||
|
||||
#endif /* CONFIG_SMP */
|
||||
|
||||
/*
|
||||
* cpu_bit_bitmap[] is a special, "compressed" data structure that
|
||||
* represents all NR_CPUS bits binary values of 1<<nr.
|
||||
*
|
||||
* It is used by cpumask_of() to get a constant address to a CPU
|
||||
* mask value that has a single bit set only.
|
||||
*/
|
||||
|
||||
/* cpu_bit_bitmap[0] is empty - so we can back into it */
|
||||
#define MASK_DECLARE_1(x) [x+1][0] = (1UL << (x))
|
||||
#define MASK_DECLARE_2(x) MASK_DECLARE_1(x), MASK_DECLARE_1(x+1)
|
||||
#define MASK_DECLARE_4(x) MASK_DECLARE_2(x), MASK_DECLARE_2(x+2)
|
||||
#define MASK_DECLARE_8(x) MASK_DECLARE_4(x), MASK_DECLARE_4(x+4)
|
||||
|
||||
const unsigned long cpu_bit_bitmap[BITS_PER_LONG+1][BITS_TO_LONGS(NR_CPUS)] = {
|
||||
|
||||
MASK_DECLARE_8(0), MASK_DECLARE_8(8),
|
||||
MASK_DECLARE_8(16), MASK_DECLARE_8(24),
|
||||
#if BITS_PER_LONG > 32
|
||||
MASK_DECLARE_8(32), MASK_DECLARE_8(40),
|
||||
MASK_DECLARE_8(48), MASK_DECLARE_8(56),
|
||||
#endif
|
||||
};
|
||||
EXPORT_SYMBOL_GPL(cpu_bit_bitmap);
|
||||
|
||||
const DECLARE_BITMAP(cpu_all_bits, NR_CPUS) = CPU_BITS_ALL;
|
||||
EXPORT_SYMBOL(cpu_all_bits);
|
||||
|
||||
#ifdef CONFIG_INIT_ALL_POSSIBLE
|
||||
static DECLARE_BITMAP(cpu_possible_bits, CONFIG_NR_CPUS) __read_mostly
|
||||
= CPU_BITS_ALL;
|
||||
#else
|
||||
static DECLARE_BITMAP(cpu_possible_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
#endif
|
||||
const struct cpumask *const cpu_possible_mask = to_cpumask(cpu_possible_bits);
|
||||
EXPORT_SYMBOL(cpu_possible_mask);
|
||||
|
||||
static DECLARE_BITMAP(cpu_online_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
const struct cpumask *const cpu_online_mask = to_cpumask(cpu_online_bits);
|
||||
EXPORT_SYMBOL(cpu_online_mask);
|
||||
|
||||
static DECLARE_BITMAP(cpu_present_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
const struct cpumask *const cpu_present_mask = to_cpumask(cpu_present_bits);
|
||||
EXPORT_SYMBOL(cpu_present_mask);
|
||||
|
||||
static DECLARE_BITMAP(cpu_active_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
const struct cpumask *const cpu_active_mask = to_cpumask(cpu_active_bits);
|
||||
EXPORT_SYMBOL(cpu_active_mask);
|
||||
|
||||
void set_cpu_possible(unsigned int cpu, bool possible)
|
||||
{
|
||||
if (possible)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_possible_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_possible_bits));
|
||||
}
|
||||
|
||||
void set_cpu_present(unsigned int cpu, bool present)
|
||||
{
|
||||
if (present)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_present_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_present_bits));
|
||||
}
|
||||
|
||||
void set_cpu_online(unsigned int cpu, bool online)
|
||||
{
|
||||
if (online)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_online_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_online_bits));
|
||||
}
|
||||
|
||||
void set_cpu_active(unsigned int cpu, bool active)
|
||||
{
|
||||
if (active)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_active_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_active_bits));
|
||||
}
|
||||
|
||||
void init_cpu_present(const struct cpumask *src)
|
||||
{
|
||||
cpumask_copy(to_cpumask(cpu_present_bits), src);
|
||||
}
|
||||
|
||||
void init_cpu_possible(const struct cpumask *src)
|
||||
{
|
||||
cpumask_copy(to_cpumask(cpu_possible_bits), src);
|
||||
}
|
||||
|
||||
void init_cpu_online(const struct cpumask *src)
|
||||
{
|
||||
cpumask_copy(to_cpumask(cpu_online_bits), src);
|
||||
}
|
||||
784
test/fixtures/c/diff.c
vendored
Normal file
784
test/fixtures/c/diff.c
vendored
Normal file
@@ -0,0 +1,784 @@
|
||||
/*
|
||||
* Copyright (C) 2012 the libgit2 contributors
|
||||
*
|
||||
* This file is part of libgit2, distributed under the GNU GPL v2 with
|
||||
* a Linking Exception. For full terms see the included COPYING file.
|
||||
*/
|
||||
#include "common.h"
|
||||
#include "git2/diff.h"
|
||||
#include "diff.h"
|
||||
#include "fileops.h"
|
||||
#include "config.h"
|
||||
#include "attr_file.h"
|
||||
|
||||
static char *diff_prefix_from_pathspec(const git_strarray *pathspec)
|
||||
{
|
||||
git_buf prefix = GIT_BUF_INIT;
|
||||
const char *scan;
|
||||
|
||||
if (git_buf_common_prefix(&prefix, pathspec) < 0)
|
||||
return NULL;
|
||||
|
||||
/* diff prefix will only be leading non-wildcards */
|
||||
for (scan = prefix.ptr; *scan && !git__iswildcard(*scan); ++scan);
|
||||
git_buf_truncate(&prefix, scan - prefix.ptr);
|
||||
|
||||
if (prefix.size > 0)
|
||||
return git_buf_detach(&prefix);
|
||||
|
||||
git_buf_free(&prefix);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static bool diff_pathspec_is_interesting(const git_strarray *pathspec)
|
||||
{
|
||||
const char *str;
|
||||
|
||||
if (pathspec == NULL || pathspec->count == 0)
|
||||
return false;
|
||||
if (pathspec->count > 1)
|
||||
return true;
|
||||
|
||||
str = pathspec->strings[0];
|
||||
if (!str || !str[0] || (!str[1] && (str[0] == '*' || str[0] == '.')))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool diff_path_matches_pathspec(git_diff_list *diff, const char *path)
|
||||
{
|
||||
unsigned int i;
|
||||
git_attr_fnmatch *match;
|
||||
|
||||
if (!diff->pathspec.length)
|
||||
return true;
|
||||
|
||||
git_vector_foreach(&diff->pathspec, i, match) {
|
||||
int result = p_fnmatch(match->pattern, path, 0);
|
||||
|
||||
/* if we didn't match, look for exact dirname prefix match */
|
||||
if (result == FNM_NOMATCH &&
|
||||
(match->flags & GIT_ATTR_FNMATCH_HASWILD) == 0 &&
|
||||
strncmp(path, match->pattern, match->length) == 0 &&
|
||||
path[match->length] == '/')
|
||||
result = 0;
|
||||
|
||||
if (result == 0)
|
||||
return (match->flags & GIT_ATTR_FNMATCH_NEGATIVE) ? false : true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
static git_diff_delta *diff_delta__alloc(
|
||||
git_diff_list *diff,
|
||||
git_delta_t status,
|
||||
const char *path)
|
||||
{
|
||||
git_diff_delta *delta = git__calloc(1, sizeof(git_diff_delta));
|
||||
if (!delta)
|
||||
return NULL;
|
||||
|
||||
delta->old_file.path = git_pool_strdup(&diff->pool, path);
|
||||
if (delta->old_file.path == NULL) {
|
||||
git__free(delta);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
delta->new_file.path = delta->old_file.path;
|
||||
|
||||
if (diff->opts.flags & GIT_DIFF_REVERSE) {
|
||||
switch (status) {
|
||||
case GIT_DELTA_ADDED: status = GIT_DELTA_DELETED; break;
|
||||
case GIT_DELTA_DELETED: status = GIT_DELTA_ADDED; break;
|
||||
default: break; /* leave other status values alone */
|
||||
}
|
||||
}
|
||||
delta->status = status;
|
||||
|
||||
return delta;
|
||||
}
|
||||
|
||||
static git_diff_delta *diff_delta__dup(
|
||||
const git_diff_delta *d, git_pool *pool)
|
||||
{
|
||||
git_diff_delta *delta = git__malloc(sizeof(git_diff_delta));
|
||||
if (!delta)
|
||||
return NULL;
|
||||
|
||||
memcpy(delta, d, sizeof(git_diff_delta));
|
||||
|
||||
delta->old_file.path = git_pool_strdup(pool, d->old_file.path);
|
||||
if (delta->old_file.path == NULL)
|
||||
goto fail;
|
||||
|
||||
if (d->new_file.path != d->old_file.path) {
|
||||
delta->new_file.path = git_pool_strdup(pool, d->new_file.path);
|
||||
if (delta->new_file.path == NULL)
|
||||
goto fail;
|
||||
} else {
|
||||
delta->new_file.path = delta->old_file.path;
|
||||
}
|
||||
|
||||
return delta;
|
||||
|
||||
fail:
|
||||
git__free(delta);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static git_diff_delta *diff_delta__merge_like_cgit(
|
||||
const git_diff_delta *a, const git_diff_delta *b, git_pool *pool)
|
||||
{
|
||||
git_diff_delta *dup = diff_delta__dup(a, pool);
|
||||
if (!dup)
|
||||
return NULL;
|
||||
|
||||
if (git_oid_cmp(&dup->new_file.oid, &b->new_file.oid) == 0)
|
||||
return dup;
|
||||
|
||||
git_oid_cpy(&dup->new_file.oid, &b->new_file.oid);
|
||||
|
||||
dup->new_file.mode = b->new_file.mode;
|
||||
dup->new_file.size = b->new_file.size;
|
||||
dup->new_file.flags = b->new_file.flags;
|
||||
|
||||
/* Emulate C git for merging two diffs (a la 'git diff <sha>').
|
||||
*
|
||||
* When C git does a diff between the work dir and a tree, it actually
|
||||
* diffs with the index but uses the workdir contents. This emulates
|
||||
* those choices so we can emulate the type of diff.
|
||||
*/
|
||||
if (git_oid_cmp(&dup->old_file.oid, &dup->new_file.oid) == 0) {
|
||||
if (dup->status == GIT_DELTA_DELETED)
|
||||
/* preserve pending delete info */;
|
||||
else if (b->status == GIT_DELTA_UNTRACKED ||
|
||||
b->status == GIT_DELTA_IGNORED)
|
||||
dup->status = b->status;
|
||||
else
|
||||
dup->status = GIT_DELTA_UNMODIFIED;
|
||||
}
|
||||
else if (dup->status == GIT_DELTA_UNMODIFIED ||
|
||||
b->status == GIT_DELTA_DELETED)
|
||||
dup->status = b->status;
|
||||
|
||||
return dup;
|
||||
}
|
||||
|
||||
static int diff_delta__from_one(
|
||||
git_diff_list *diff,
|
||||
git_delta_t status,
|
||||
const git_index_entry *entry)
|
||||
{
|
||||
git_diff_delta *delta;
|
||||
|
||||
if (status == GIT_DELTA_IGNORED &&
|
||||
(diff->opts.flags & GIT_DIFF_INCLUDE_IGNORED) == 0)
|
||||
return 0;
|
||||
|
||||
if (status == GIT_DELTA_UNTRACKED &&
|
||||
(diff->opts.flags & GIT_DIFF_INCLUDE_UNTRACKED) == 0)
|
||||
return 0;
|
||||
|
||||
if (!diff_path_matches_pathspec(diff, entry->path))
|
||||
return 0;
|
||||
|
||||
delta = diff_delta__alloc(diff, status, entry->path);
|
||||
GITERR_CHECK_ALLOC(delta);
|
||||
|
||||
/* This fn is just for single-sided diffs */
|
||||
assert(status != GIT_DELTA_MODIFIED);
|
||||
|
||||
if (delta->status == GIT_DELTA_DELETED) {
|
||||
delta->old_file.mode = entry->mode;
|
||||
delta->old_file.size = entry->file_size;
|
||||
git_oid_cpy(&delta->old_file.oid, &entry->oid);
|
||||
} else /* ADDED, IGNORED, UNTRACKED */ {
|
||||
delta->new_file.mode = entry->mode;
|
||||
delta->new_file.size = entry->file_size;
|
||||
git_oid_cpy(&delta->new_file.oid, &entry->oid);
|
||||
}
|
||||
|
||||
delta->old_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
delta->new_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
|
||||
if (git_vector_insert(&diff->deltas, delta) < 0) {
|
||||
git__free(delta);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int diff_delta__from_two(
|
||||
git_diff_list *diff,
|
||||
git_delta_t status,
|
||||
const git_index_entry *old_entry,
|
||||
const git_index_entry *new_entry,
|
||||
git_oid *new_oid)
|
||||
{
|
||||
git_diff_delta *delta;
|
||||
|
||||
if (status == GIT_DELTA_UNMODIFIED &&
|
||||
(diff->opts.flags & GIT_DIFF_INCLUDE_UNMODIFIED) == 0)
|
||||
return 0;
|
||||
|
||||
if ((diff->opts.flags & GIT_DIFF_REVERSE) != 0) {
|
||||
const git_index_entry *temp = old_entry;
|
||||
old_entry = new_entry;
|
||||
new_entry = temp;
|
||||
}
|
||||
|
||||
delta = diff_delta__alloc(diff, status, old_entry->path);
|
||||
GITERR_CHECK_ALLOC(delta);
|
||||
|
||||
delta->old_file.mode = old_entry->mode;
|
||||
git_oid_cpy(&delta->old_file.oid, &old_entry->oid);
|
||||
delta->old_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
|
||||
delta->new_file.mode = new_entry->mode;
|
||||
git_oid_cpy(&delta->new_file.oid, new_oid ? new_oid : &new_entry->oid);
|
||||
if (new_oid || !git_oid_iszero(&new_entry->oid))
|
||||
delta->new_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
|
||||
if (git_vector_insert(&diff->deltas, delta) < 0) {
|
||||
git__free(delta);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static char *diff_strdup_prefix(git_pool *pool, const char *prefix)
|
||||
{
|
||||
size_t len = strlen(prefix);
|
||||
|
||||
/* append '/' at end if needed */
|
||||
if (len > 0 && prefix[len - 1] != '/')
|
||||
return git_pool_strcat(pool, prefix, "/");
|
||||
else
|
||||
return git_pool_strndup(pool, prefix, len + 1);
|
||||
}
|
||||
|
||||
static int diff_delta__cmp(const void *a, const void *b)
|
||||
{
|
||||
const git_diff_delta *da = a, *db = b;
|
||||
int val = strcmp(da->old_file.path, db->old_file.path);
|
||||
return val ? val : ((int)da->status - (int)db->status);
|
||||
}
|
||||
|
||||
static int config_bool(git_config *cfg, const char *name, int defvalue)
|
||||
{
|
||||
int val = defvalue;
|
||||
|
||||
if (git_config_get_bool(&val, cfg, name) < 0)
|
||||
giterr_clear();
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
static git_diff_list *git_diff_list_alloc(
|
||||
git_repository *repo, const git_diff_options *opts)
|
||||
{
|
||||
git_config *cfg;
|
||||
size_t i;
|
||||
git_diff_list *diff = git__calloc(1, sizeof(git_diff_list));
|
||||
if (diff == NULL)
|
||||
return NULL;
|
||||
|
||||
diff->repo = repo;
|
||||
|
||||
if (git_vector_init(&diff->deltas, 0, diff_delta__cmp) < 0 ||
|
||||
git_pool_init(&diff->pool, 1, 0) < 0)
|
||||
goto fail;
|
||||
|
||||
/* load config values that affect diff behavior */
|
||||
if (git_repository_config__weakptr(&cfg, repo) < 0)
|
||||
goto fail;
|
||||
if (config_bool(cfg, "core.symlinks", 1))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_HAS_SYMLINKS;
|
||||
if (config_bool(cfg, "core.ignorestat", 0))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_ASSUME_UNCHANGED;
|
||||
if (config_bool(cfg, "core.filemode", 1))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_TRUST_EXEC_BIT;
|
||||
if (config_bool(cfg, "core.trustctime", 1))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_TRUST_CTIME;
|
||||
/* Don't set GIT_DIFFCAPS_USE_DEV - compile time option in core git */
|
||||
|
||||
if (opts == NULL)
|
||||
return diff;
|
||||
|
||||
memcpy(&diff->opts, opts, sizeof(git_diff_options));
|
||||
memset(&diff->opts.pathspec, 0, sizeof(diff->opts.pathspec));
|
||||
|
||||
diff->opts.old_prefix = diff_strdup_prefix(&diff->pool,
|
||||
opts->old_prefix ? opts->old_prefix : DIFF_OLD_PREFIX_DEFAULT);
|
||||
diff->opts.new_prefix = diff_strdup_prefix(&diff->pool,
|
||||
opts->new_prefix ? opts->new_prefix : DIFF_NEW_PREFIX_DEFAULT);
|
||||
|
||||
if (!diff->opts.old_prefix || !diff->opts.new_prefix)
|
||||
goto fail;
|
||||
|
||||
if (diff->opts.flags & GIT_DIFF_REVERSE) {
|
||||
char *swap = diff->opts.old_prefix;
|
||||
diff->opts.old_prefix = diff->opts.new_prefix;
|
||||
diff->opts.new_prefix = swap;
|
||||
}
|
||||
|
||||
/* only copy pathspec if it is "interesting" so we can test
|
||||
* diff->pathspec.length > 0 to know if it is worth calling
|
||||
* fnmatch as we iterate.
|
||||
*/
|
||||
if (!diff_pathspec_is_interesting(&opts->pathspec))
|
||||
return diff;
|
||||
|
||||
if (git_vector_init(
|
||||
&diff->pathspec, (unsigned int)opts->pathspec.count, NULL) < 0)
|
||||
goto fail;
|
||||
|
||||
for (i = 0; i < opts->pathspec.count; ++i) {
|
||||
int ret;
|
||||
const char *pattern = opts->pathspec.strings[i];
|
||||
git_attr_fnmatch *match = git__calloc(1, sizeof(git_attr_fnmatch));
|
||||
if (!match)
|
||||
goto fail;
|
||||
match->flags = GIT_ATTR_FNMATCH_ALLOWSPACE;
|
||||
ret = git_attr_fnmatch__parse(match, &diff->pool, NULL, &pattern);
|
||||
if (ret == GIT_ENOTFOUND) {
|
||||
git__free(match);
|
||||
continue;
|
||||
} else if (ret < 0)
|
||||
goto fail;
|
||||
|
||||
if (git_vector_insert(&diff->pathspec, match) < 0)
|
||||
goto fail;
|
||||
}
|
||||
|
||||
return diff;
|
||||
|
||||
fail:
|
||||
git_diff_list_free(diff);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void git_diff_list_free(git_diff_list *diff)
|
||||
{
|
||||
git_diff_delta *delta;
|
||||
git_attr_fnmatch *match;
|
||||
unsigned int i;
|
||||
|
||||
if (!diff)
|
||||
return;
|
||||
|
||||
git_vector_foreach(&diff->deltas, i, delta) {
|
||||
git__free(delta);
|
||||
diff->deltas.contents[i] = NULL;
|
||||
}
|
||||
git_vector_free(&diff->deltas);
|
||||
|
||||
git_vector_foreach(&diff->pathspec, i, match) {
|
||||
git__free(match);
|
||||
diff->pathspec.contents[i] = NULL;
|
||||
}
|
||||
git_vector_free(&diff->pathspec);
|
||||
|
||||
git_pool_clear(&diff->pool);
|
||||
git__free(diff);
|
||||
}
|
||||
|
||||
static int oid_for_workdir_item(
|
||||
git_repository *repo,
|
||||
const git_index_entry *item,
|
||||
git_oid *oid)
|
||||
{
|
||||
int result;
|
||||
git_buf full_path = GIT_BUF_INIT;
|
||||
|
||||
if (git_buf_joinpath(&full_path, git_repository_workdir(repo), item->path) < 0)
|
||||
return -1;
|
||||
|
||||
/* calculate OID for file if possible*/
|
||||
if (S_ISLNK(item->mode))
|
||||
result = git_odb__hashlink(oid, full_path.ptr);
|
||||
else if (!git__is_sizet(item->file_size)) {
|
||||
giterr_set(GITERR_OS, "File size overflow for 32-bit systems");
|
||||
result = -1;
|
||||
} else {
|
||||
int fd = git_futils_open_ro(full_path.ptr);
|
||||
if (fd < 0)
|
||||
result = fd;
|
||||
else {
|
||||
result = git_odb__hashfd(
|
||||
oid, fd, (size_t)item->file_size, GIT_OBJ_BLOB);
|
||||
p_close(fd);
|
||||
}
|
||||
}
|
||||
|
||||
git_buf_free(&full_path);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
#define EXEC_BIT_MASK 0000111
|
||||
|
||||
static int maybe_modified(
|
||||
git_iterator *old_iter,
|
||||
const git_index_entry *oitem,
|
||||
git_iterator *new_iter,
|
||||
const git_index_entry *nitem,
|
||||
git_diff_list *diff)
|
||||
{
|
||||
git_oid noid, *use_noid = NULL;
|
||||
git_delta_t status = GIT_DELTA_MODIFIED;
|
||||
unsigned int omode = oitem->mode;
|
||||
unsigned int nmode = nitem->mode;
|
||||
|
||||
GIT_UNUSED(old_iter);
|
||||
|
||||
if (!diff_path_matches_pathspec(diff, oitem->path))
|
||||
return 0;
|
||||
|
||||
/* on platforms with no symlinks, promote plain files to symlinks */
|
||||
if (S_ISLNK(omode) && S_ISREG(nmode) &&
|
||||
!(diff->diffcaps & GIT_DIFFCAPS_HAS_SYMLINKS))
|
||||
nmode = GIT_MODE_TYPE(omode) | (nmode & GIT_MODE_PERMS_MASK);
|
||||
|
||||
/* on platforms with no execmode, clear exec bit from comparisons */
|
||||
if (!(diff->diffcaps & GIT_DIFFCAPS_TRUST_EXEC_BIT)) {
|
||||
omode = omode & ~EXEC_BIT_MASK;
|
||||
nmode = nmode & ~EXEC_BIT_MASK;
|
||||
}
|
||||
|
||||
/* support "assume unchanged" (badly, b/c we still stat everything) */
|
||||
if ((diff->diffcaps & GIT_DIFFCAPS_ASSUME_UNCHANGED) != 0)
|
||||
status = (oitem->flags_extended & GIT_IDXENTRY_INTENT_TO_ADD) ?
|
||||
GIT_DELTA_MODIFIED : GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* support "skip worktree" index bit */
|
||||
else if ((oitem->flags_extended & GIT_IDXENTRY_SKIP_WORKTREE) != 0)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* if basic type of file changed, then split into delete and add */
|
||||
else if (GIT_MODE_TYPE(omode) != GIT_MODE_TYPE(nmode)) {
|
||||
if (diff_delta__from_one(diff, GIT_DELTA_DELETED, oitem) < 0 ||
|
||||
diff_delta__from_one(diff, GIT_DELTA_ADDED, nitem) < 0)
|
||||
return -1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* if oids and modes match, then file is unmodified */
|
||||
else if (git_oid_cmp(&oitem->oid, &nitem->oid) == 0 &&
|
||||
omode == nmode)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* if we have a workdir item with an unknown oid, check deeper */
|
||||
else if (git_oid_iszero(&nitem->oid) && new_iter->type == GIT_ITERATOR_WORKDIR) {
|
||||
/* TODO: add check against index file st_mtime to avoid racy-git */
|
||||
|
||||
/* if they files look exactly alike, then we'll assume the same */
|
||||
if (oitem->file_size == nitem->file_size &&
|
||||
(!(diff->diffcaps & GIT_DIFFCAPS_TRUST_CTIME) ||
|
||||
(oitem->ctime.seconds == nitem->ctime.seconds)) &&
|
||||
oitem->mtime.seconds == nitem->mtime.seconds &&
|
||||
(!(diff->diffcaps & GIT_DIFFCAPS_USE_DEV) ||
|
||||
(oitem->dev == nitem->dev)) &&
|
||||
oitem->ino == nitem->ino &&
|
||||
oitem->uid == nitem->uid &&
|
||||
oitem->gid == nitem->gid)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
else if (S_ISGITLINK(nmode)) {
|
||||
git_submodule *sub;
|
||||
|
||||
if ((diff->opts.flags & GIT_DIFF_IGNORE_SUBMODULES) != 0)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
else if (git_submodule_lookup(&sub, diff->repo, nitem->path) < 0)
|
||||
return -1;
|
||||
else if (sub->ignore == GIT_SUBMODULE_IGNORE_ALL)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
else {
|
||||
/* TODO: support other GIT_SUBMODULE_IGNORE values */
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
}
|
||||
}
|
||||
|
||||
/* TODO: check git attributes so we will not have to read the file
|
||||
* in if it is marked binary.
|
||||
*/
|
||||
|
||||
else if (oid_for_workdir_item(diff->repo, nitem, &noid) < 0)
|
||||
return -1;
|
||||
|
||||
else if (git_oid_cmp(&oitem->oid, &noid) == 0 &&
|
||||
omode == nmode)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* store calculated oid so we don't have to recalc later */
|
||||
use_noid = &noid;
|
||||
}
|
||||
|
||||
return diff_delta__from_two(diff, status, oitem, nitem, use_noid);
|
||||
}
|
||||
|
||||
static int diff_from_iterators(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts, /**< can be NULL for defaults */
|
||||
git_iterator *old_iter,
|
||||
git_iterator *new_iter,
|
||||
git_diff_list **diff_ptr)
|
||||
{
|
||||
const git_index_entry *oitem, *nitem;
|
||||
git_buf ignore_prefix = GIT_BUF_INIT;
|
||||
git_diff_list *diff = git_diff_list_alloc(repo, opts);
|
||||
if (!diff)
|
||||
goto fail;
|
||||
|
||||
diff->old_src = old_iter->type;
|
||||
diff->new_src = new_iter->type;
|
||||
|
||||
if (git_iterator_current(old_iter, &oitem) < 0 ||
|
||||
git_iterator_current(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
|
||||
/* run iterators building diffs */
|
||||
while (oitem || nitem) {
|
||||
|
||||
/* create DELETED records for old items not matched in new */
|
||||
if (oitem && (!nitem || strcmp(oitem->path, nitem->path) < 0)) {
|
||||
if (diff_delta__from_one(diff, GIT_DELTA_DELETED, oitem) < 0 ||
|
||||
git_iterator_advance(old_iter, &oitem) < 0)
|
||||
goto fail;
|
||||
}
|
||||
|
||||
/* create ADDED, TRACKED, or IGNORED records for new items not
|
||||
* matched in old (and/or descend into directories as needed)
|
||||
*/
|
||||
else if (nitem && (!oitem || strcmp(oitem->path, nitem->path) > 0)) {
|
||||
git_delta_t delta_type = GIT_DELTA_UNTRACKED;
|
||||
|
||||
/* check if contained in ignored parent directory */
|
||||
if (git_buf_len(&ignore_prefix) &&
|
||||
git__prefixcmp(nitem->path, git_buf_cstr(&ignore_prefix)) == 0)
|
||||
delta_type = GIT_DELTA_IGNORED;
|
||||
|
||||
if (S_ISDIR(nitem->mode)) {
|
||||
/* recurse into directory only if there are tracked items in
|
||||
* it or if the user requested the contents of untracked
|
||||
* directories and it is not under an ignored directory.
|
||||
*/
|
||||
if ((oitem && git__prefixcmp(oitem->path, nitem->path) == 0) ||
|
||||
(delta_type == GIT_DELTA_UNTRACKED &&
|
||||
(diff->opts.flags & GIT_DIFF_RECURSE_UNTRACKED_DIRS) != 0))
|
||||
{
|
||||
/* if this directory is ignored, remember it as the
|
||||
* "ignore_prefix" for processing contained items
|
||||
*/
|
||||
if (delta_type == GIT_DELTA_UNTRACKED &&
|
||||
git_iterator_current_is_ignored(new_iter))
|
||||
git_buf_sets(&ignore_prefix, nitem->path);
|
||||
|
||||
if (git_iterator_advance_into_directory(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
/* In core git, the next two "else if" clauses are effectively
|
||||
* reversed -- i.e. when an untracked file contained in an
|
||||
* ignored directory is individually ignored, it shows up as an
|
||||
* ignored file in the diff list, even though other untracked
|
||||
* files in the same directory are skipped completely.
|
||||
*
|
||||
* To me, this is odd. If the directory is ignored and the file
|
||||
* is untracked, we should skip it consistently, regardless of
|
||||
* whether it happens to match a pattern in the ignore file.
|
||||
*
|
||||
* To match the core git behavior, just reverse the following
|
||||
* two "else if" cases so that individual file ignores are
|
||||
* checked before container directory exclusions are used to
|
||||
* skip the file.
|
||||
*/
|
||||
else if (delta_type == GIT_DELTA_IGNORED) {
|
||||
if (git_iterator_advance(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
continue; /* ignored parent directory, so skip completely */
|
||||
}
|
||||
|
||||
else if (git_iterator_current_is_ignored(new_iter))
|
||||
delta_type = GIT_DELTA_IGNORED;
|
||||
|
||||
else if (new_iter->type != GIT_ITERATOR_WORKDIR)
|
||||
delta_type = GIT_DELTA_ADDED;
|
||||
|
||||
if (diff_delta__from_one(diff, delta_type, nitem) < 0 ||
|
||||
git_iterator_advance(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
}
|
||||
|
||||
/* otherwise item paths match, so create MODIFIED record
|
||||
* (or ADDED and DELETED pair if type changed)
|
||||
*/
|
||||
else {
|
||||
assert(oitem && nitem && strcmp(oitem->path, nitem->path) == 0);
|
||||
|
||||
if (maybe_modified(old_iter, oitem, new_iter, nitem, diff) < 0 ||
|
||||
git_iterator_advance(old_iter, &oitem) < 0 ||
|
||||
git_iterator_advance(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
git_iterator_free(old_iter);
|
||||
git_iterator_free(new_iter);
|
||||
git_buf_free(&ignore_prefix);
|
||||
|
||||
*diff_ptr = diff;
|
||||
return 0;
|
||||
|
||||
fail:
|
||||
git_iterator_free(old_iter);
|
||||
git_iterator_free(new_iter);
|
||||
git_buf_free(&ignore_prefix);
|
||||
|
||||
git_diff_list_free(diff);
|
||||
*diff_ptr = NULL;
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
int git_diff_tree_to_tree(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts, /**< can be NULL for defaults */
|
||||
git_tree *old_tree,
|
||||
git_tree *new_tree,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && old_tree && new_tree && diff);
|
||||
|
||||
if (git_iterator_for_tree_range(&a, repo, old_tree, prefix, prefix) < 0 ||
|
||||
git_iterator_for_tree_range(&b, repo, new_tree, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
int git_diff_index_to_tree(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts,
|
||||
git_tree *old_tree,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && diff);
|
||||
|
||||
if (git_iterator_for_tree_range(&a, repo, old_tree, prefix, prefix) < 0 ||
|
||||
git_iterator_for_index_range(&b, repo, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
int git_diff_workdir_to_index(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && diff);
|
||||
|
||||
if (git_iterator_for_index_range(&a, repo, prefix, prefix) < 0 ||
|
||||
git_iterator_for_workdir_range(&b, repo, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
|
||||
int git_diff_workdir_to_tree(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts,
|
||||
git_tree *old_tree,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && old_tree && diff);
|
||||
|
||||
if (git_iterator_for_tree_range(&a, repo, old_tree, prefix, prefix) < 0 ||
|
||||
git_iterator_for_workdir_range(&b, repo, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
int git_diff_merge(
|
||||
git_diff_list *onto,
|
||||
const git_diff_list *from)
|
||||
{
|
||||
int error = 0;
|
||||
git_pool onto_pool;
|
||||
git_vector onto_new;
|
||||
git_diff_delta *delta;
|
||||
unsigned int i, j;
|
||||
|
||||
assert(onto && from);
|
||||
|
||||
if (!from->deltas.length)
|
||||
return 0;
|
||||
|
||||
if (git_vector_init(&onto_new, onto->deltas.length, diff_delta__cmp) < 0 ||
|
||||
git_pool_init(&onto_pool, 1, 0) < 0)
|
||||
return -1;
|
||||
|
||||
for (i = 0, j = 0; i < onto->deltas.length || j < from->deltas.length; ) {
|
||||
git_diff_delta *o = GIT_VECTOR_GET(&onto->deltas, i);
|
||||
const git_diff_delta *f = GIT_VECTOR_GET(&from->deltas, j);
|
||||
int cmp = !f ? -1 : !o ? 1 : strcmp(o->old_file.path, f->old_file.path);
|
||||
|
||||
if (cmp < 0) {
|
||||
delta = diff_delta__dup(o, &onto_pool);
|
||||
i++;
|
||||
} else if (cmp > 0) {
|
||||
delta = diff_delta__dup(f, &onto_pool);
|
||||
j++;
|
||||
} else {
|
||||
delta = diff_delta__merge_like_cgit(o, f, &onto_pool);
|
||||
i++;
|
||||
j++;
|
||||
}
|
||||
|
||||
if ((error = !delta ? -1 : git_vector_insert(&onto_new, delta)) < 0)
|
||||
break;
|
||||
}
|
||||
|
||||
if (!error) {
|
||||
git_vector_swap(&onto->deltas, &onto_new);
|
||||
git_pool_swap(&onto->pool, &onto_pool);
|
||||
onto->new_src = from->new_src;
|
||||
}
|
||||
|
||||
git_vector_foreach(&onto_new, i, delta)
|
||||
git__free(delta);
|
||||
git_vector_free(&onto_new);
|
||||
git_pool_clear(&onto_pool);
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
609
test/fixtures/c/git.c
vendored
Normal file
609
test/fixtures/c/git.c
vendored
Normal file
@@ -0,0 +1,609 @@
|
||||
#include "builtin.h"
|
||||
#include "cache.h"
|
||||
#include "exec_cmd.h"
|
||||
#include "help.h"
|
||||
#include "quote.h"
|
||||
#include "run-command.h"
|
||||
|
||||
const char git_usage_string[] =
|
||||
"git [--version] [--exec-path[=<path>]] [--html-path] [--man-path] [--info-path]\n"
|
||||
" [-p|--paginate|--no-pager] [--no-replace-objects] [--bare]\n"
|
||||
" [--git-dir=<path>] [--work-tree=<path>] [--namespace=<name>]\n"
|
||||
" [-c name=value] [--help]\n"
|
||||
" <command> [<args>]";
|
||||
|
||||
const char git_more_info_string[] =
|
||||
N_("See 'git help <command>' for more information on a specific command.");
|
||||
|
||||
static struct startup_info git_startup_info;
|
||||
static int use_pager = -1;
|
||||
struct pager_config {
|
||||
const char *cmd;
|
||||
int want;
|
||||
char *value;
|
||||
};
|
||||
|
||||
static int pager_command_config(const char *var, const char *value, void *data)
|
||||
{
|
||||
struct pager_config *c = data;
|
||||
if (!prefixcmp(var, "pager.") && !strcmp(var + 6, c->cmd)) {
|
||||
int b = git_config_maybe_bool(var, value);
|
||||
if (b >= 0)
|
||||
c->want = b;
|
||||
else {
|
||||
c->want = 1;
|
||||
c->value = xstrdup(value);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* returns 0 for "no pager", 1 for "use pager", and -1 for "not specified" */
|
||||
int check_pager_config(const char *cmd)
|
||||
{
|
||||
struct pager_config c;
|
||||
c.cmd = cmd;
|
||||
c.want = -1;
|
||||
c.value = NULL;
|
||||
git_config(pager_command_config, &c);
|
||||
if (c.value)
|
||||
pager_program = c.value;
|
||||
return c.want;
|
||||
}
|
||||
|
||||
static void commit_pager_choice(void) {
|
||||
switch (use_pager) {
|
||||
case 0:
|
||||
setenv("GIT_PAGER", "cat", 1);
|
||||
break;
|
||||
case 1:
|
||||
setup_pager();
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static int handle_options(const char ***argv, int *argc, int *envchanged)
|
||||
{
|
||||
const char **orig_argv = *argv;
|
||||
|
||||
while (*argc > 0) {
|
||||
const char *cmd = (*argv)[0];
|
||||
if (cmd[0] != '-')
|
||||
break;
|
||||
|
||||
/*
|
||||
* For legacy reasons, the "version" and "help"
|
||||
* commands can be written with "--" prepended
|
||||
* to make them look like flags.
|
||||
*/
|
||||
if (!strcmp(cmd, "--help") || !strcmp(cmd, "--version"))
|
||||
break;
|
||||
|
||||
/*
|
||||
* Check remaining flags.
|
||||
*/
|
||||
if (!prefixcmp(cmd, "--exec-path")) {
|
||||
cmd += 11;
|
||||
if (*cmd == '=')
|
||||
git_set_argv_exec_path(cmd + 1);
|
||||
else {
|
||||
puts(git_exec_path());
|
||||
exit(0);
|
||||
}
|
||||
} else if (!strcmp(cmd, "--html-path")) {
|
||||
puts(system_path(GIT_HTML_PATH));
|
||||
exit(0);
|
||||
} else if (!strcmp(cmd, "--man-path")) {
|
||||
puts(system_path(GIT_MAN_PATH));
|
||||
exit(0);
|
||||
} else if (!strcmp(cmd, "--info-path")) {
|
||||
puts(system_path(GIT_INFO_PATH));
|
||||
exit(0);
|
||||
} else if (!strcmp(cmd, "-p") || !strcmp(cmd, "--paginate")) {
|
||||
use_pager = 1;
|
||||
} else if (!strcmp(cmd, "--no-pager")) {
|
||||
use_pager = 0;
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
} else if (!strcmp(cmd, "--no-replace-objects")) {
|
||||
read_replace_refs = 0;
|
||||
setenv(NO_REPLACE_OBJECTS_ENVIRONMENT, "1", 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
} else if (!strcmp(cmd, "--git-dir")) {
|
||||
if (*argc < 2) {
|
||||
fprintf(stderr, "No directory given for --git-dir.\n" );
|
||||
usage(git_usage_string);
|
||||
}
|
||||
setenv(GIT_DIR_ENVIRONMENT, (*argv)[1], 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
(*argv)++;
|
||||
(*argc)--;
|
||||
} else if (!prefixcmp(cmd, "--git-dir=")) {
|
||||
setenv(GIT_DIR_ENVIRONMENT, cmd + 10, 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
} else if (!strcmp(cmd, "--namespace")) {
|
||||
if (*argc < 2) {
|
||||
fprintf(stderr, "No namespace given for --namespace.\n" );
|
||||
usage(git_usage_string);
|
||||
}
|
||||
setenv(GIT_NAMESPACE_ENVIRONMENT, (*argv)[1], 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
(*argv)++;
|
||||
(*argc)--;
|
||||
} else if (!prefixcmp(cmd, "--namespace=")) {
|
||||
setenv(GIT_NAMESPACE_ENVIRONMENT, cmd + 12, 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
} else if (!strcmp(cmd, "--work-tree")) {
|
||||
if (*argc < 2) {
|
||||
fprintf(stderr, "No directory given for --work-tree.\n" );
|
||||
usage(git_usage_string);
|
||||
}
|
||||
setenv(GIT_WORK_TREE_ENVIRONMENT, (*argv)[1], 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
(*argv)++;
|
||||
(*argc)--;
|
||||
} else if (!prefixcmp(cmd, "--work-tree=")) {
|
||||
setenv(GIT_WORK_TREE_ENVIRONMENT, cmd + 12, 1);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
} else if (!strcmp(cmd, "--bare")) {
|
||||
static char git_dir[PATH_MAX+1];
|
||||
is_bare_repository_cfg = 1;
|
||||
setenv(GIT_DIR_ENVIRONMENT, getcwd(git_dir, sizeof(git_dir)), 0);
|
||||
if (envchanged)
|
||||
*envchanged = 1;
|
||||
} else if (!strcmp(cmd, "-c")) {
|
||||
if (*argc < 2) {
|
||||
fprintf(stderr, "-c expects a configuration string\n" );
|
||||
usage(git_usage_string);
|
||||
}
|
||||
git_config_push_parameter((*argv)[1]);
|
||||
(*argv)++;
|
||||
(*argc)--;
|
||||
} else {
|
||||
fprintf(stderr, "Unknown option: %s\n", cmd);
|
||||
usage(git_usage_string);
|
||||
}
|
||||
|
||||
(*argv)++;
|
||||
(*argc)--;
|
||||
}
|
||||
return (*argv) - orig_argv;
|
||||
}
|
||||
|
||||
static int handle_alias(int *argcp, const char ***argv)
|
||||
{
|
||||
int envchanged = 0, ret = 0, saved_errno = errno;
|
||||
const char *subdir;
|
||||
int count, option_count;
|
||||
const char **new_argv;
|
||||
const char *alias_command;
|
||||
char *alias_string;
|
||||
int unused_nongit;
|
||||
|
||||
subdir = setup_git_directory_gently(&unused_nongit);
|
||||
|
||||
alias_command = (*argv)[0];
|
||||
alias_string = alias_lookup(alias_command);
|
||||
if (alias_string) {
|
||||
if (alias_string[0] == '!') {
|
||||
const char **alias_argv;
|
||||
int argc = *argcp, i;
|
||||
|
||||
commit_pager_choice();
|
||||
|
||||
/* build alias_argv */
|
||||
alias_argv = xmalloc(sizeof(*alias_argv) * (argc + 1));
|
||||
alias_argv[0] = alias_string + 1;
|
||||
for (i = 1; i < argc; ++i)
|
||||
alias_argv[i] = (*argv)[i];
|
||||
alias_argv[argc] = NULL;
|
||||
|
||||
ret = run_command_v_opt(alias_argv, RUN_USING_SHELL);
|
||||
if (ret >= 0) /* normal exit */
|
||||
exit(ret);
|
||||
|
||||
die_errno("While expanding alias '%s': '%s'",
|
||||
alias_command, alias_string + 1);
|
||||
}
|
||||
count = split_cmdline(alias_string, &new_argv);
|
||||
if (count < 0)
|
||||
die("Bad alias.%s string: %s", alias_command,
|
||||
split_cmdline_strerror(count));
|
||||
option_count = handle_options(&new_argv, &count, &envchanged);
|
||||
if (envchanged)
|
||||
die("alias '%s' changes environment variables\n"
|
||||
"You can use '!git' in the alias to do this.",
|
||||
alias_command);
|
||||
memmove(new_argv - option_count, new_argv,
|
||||
count * sizeof(char *));
|
||||
new_argv -= option_count;
|
||||
|
||||
if (count < 1)
|
||||
die("empty alias for %s", alias_command);
|
||||
|
||||
if (!strcmp(alias_command, new_argv[0]))
|
||||
die("recursive alias: %s", alias_command);
|
||||
|
||||
trace_argv_printf(new_argv,
|
||||
"trace: alias expansion: %s =>",
|
||||
alias_command);
|
||||
|
||||
new_argv = xrealloc(new_argv, sizeof(char *) *
|
||||
(count + *argcp));
|
||||
/* insert after command name */
|
||||
memcpy(new_argv + count, *argv + 1, sizeof(char *) * *argcp);
|
||||
|
||||
*argv = new_argv;
|
||||
*argcp += count - 1;
|
||||
|
||||
ret = 1;
|
||||
}
|
||||
|
||||
if (subdir && chdir(subdir))
|
||||
die_errno("Cannot change to '%s'", subdir);
|
||||
|
||||
errno = saved_errno;
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
const char git_version_string[] = GIT_VERSION;
|
||||
|
||||
#define RUN_SETUP (1<<0)
|
||||
#define RUN_SETUP_GENTLY (1<<1)
|
||||
#define USE_PAGER (1<<2)
|
||||
/*
|
||||
* require working tree to be present -- anything uses this needs
|
||||
* RUN_SETUP for reading from the configuration file.
|
||||
*/
|
||||
#define NEED_WORK_TREE (1<<3)
|
||||
|
||||
struct cmd_struct {
|
||||
const char *cmd;
|
||||
int (*fn)(int, const char **, const char *);
|
||||
int option;
|
||||
};
|
||||
|
||||
static int run_builtin(struct cmd_struct *p, int argc, const char **argv)
|
||||
{
|
||||
int status, help;
|
||||
struct stat st;
|
||||
const char *prefix;
|
||||
|
||||
prefix = NULL;
|
||||
help = argc == 2 && !strcmp(argv[1], "-h");
|
||||
if (!help) {
|
||||
if (p->option & RUN_SETUP)
|
||||
prefix = setup_git_directory();
|
||||
if (p->option & RUN_SETUP_GENTLY) {
|
||||
int nongit_ok;
|
||||
prefix = setup_git_directory_gently(&nongit_ok);
|
||||
}
|
||||
|
||||
if (use_pager == -1 && p->option & (RUN_SETUP | RUN_SETUP_GENTLY))
|
||||
use_pager = check_pager_config(p->cmd);
|
||||
if (use_pager == -1 && p->option & USE_PAGER)
|
||||
use_pager = 1;
|
||||
|
||||
if ((p->option & (RUN_SETUP | RUN_SETUP_GENTLY)) &&
|
||||
startup_info->have_repository) /* get_git_dir() may set up repo, avoid that */
|
||||
trace_repo_setup(prefix);
|
||||
}
|
||||
commit_pager_choice();
|
||||
|
||||
if (!help && p->option & NEED_WORK_TREE)
|
||||
setup_work_tree();
|
||||
|
||||
trace_argv_printf(argv, "trace: built-in: git");
|
||||
|
||||
status = p->fn(argc, argv, prefix);
|
||||
if (status)
|
||||
return status;
|
||||
|
||||
/* Somebody closed stdout? */
|
||||
if (fstat(fileno(stdout), &st))
|
||||
return 0;
|
||||
/* Ignore write errors for pipes and sockets.. */
|
||||
if (S_ISFIFO(st.st_mode) || S_ISSOCK(st.st_mode))
|
||||
return 0;
|
||||
|
||||
/* Check for ENOSPC and EIO errors.. */
|
||||
if (fflush(stdout))
|
||||
die_errno("write failure on standard output");
|
||||
if (ferror(stdout))
|
||||
die("unknown write failure on standard output");
|
||||
if (fclose(stdout))
|
||||
die_errno("close failed on standard output");
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void handle_internal_command(int argc, const char **argv)
|
||||
{
|
||||
const char *cmd = argv[0];
|
||||
static struct cmd_struct commands[] = {
|
||||
{ "add", cmd_add, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "annotate", cmd_annotate, RUN_SETUP },
|
||||
{ "apply", cmd_apply, RUN_SETUP_GENTLY },
|
||||
{ "archive", cmd_archive },
|
||||
{ "bisect--helper", cmd_bisect__helper, RUN_SETUP },
|
||||
{ "blame", cmd_blame, RUN_SETUP },
|
||||
{ "branch", cmd_branch, RUN_SETUP },
|
||||
{ "bundle", cmd_bundle, RUN_SETUP_GENTLY },
|
||||
{ "cat-file", cmd_cat_file, RUN_SETUP },
|
||||
{ "check-attr", cmd_check_attr, RUN_SETUP },
|
||||
{ "check-ref-format", cmd_check_ref_format },
|
||||
{ "checkout", cmd_checkout, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "checkout-index", cmd_checkout_index,
|
||||
RUN_SETUP | NEED_WORK_TREE},
|
||||
{ "cherry", cmd_cherry, RUN_SETUP },
|
||||
{ "cherry-pick", cmd_cherry_pick, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "clean", cmd_clean, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "clone", cmd_clone },
|
||||
{ "column", cmd_column, RUN_SETUP_GENTLY },
|
||||
{ "commit", cmd_commit, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "commit-tree", cmd_commit_tree, RUN_SETUP },
|
||||
{ "config", cmd_config, RUN_SETUP_GENTLY },
|
||||
{ "count-objects", cmd_count_objects, RUN_SETUP },
|
||||
{ "describe", cmd_describe, RUN_SETUP },
|
||||
{ "diff", cmd_diff },
|
||||
{ "diff-files", cmd_diff_files, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "diff-index", cmd_diff_index, RUN_SETUP },
|
||||
{ "diff-tree", cmd_diff_tree, RUN_SETUP },
|
||||
{ "fast-export", cmd_fast_export, RUN_SETUP },
|
||||
{ "fetch", cmd_fetch, RUN_SETUP },
|
||||
{ "fetch-pack", cmd_fetch_pack, RUN_SETUP },
|
||||
{ "fmt-merge-msg", cmd_fmt_merge_msg, RUN_SETUP },
|
||||
{ "for-each-ref", cmd_for_each_ref, RUN_SETUP },
|
||||
{ "format-patch", cmd_format_patch, RUN_SETUP },
|
||||
{ "fsck", cmd_fsck, RUN_SETUP },
|
||||
{ "fsck-objects", cmd_fsck, RUN_SETUP },
|
||||
{ "gc", cmd_gc, RUN_SETUP },
|
||||
{ "get-tar-commit-id", cmd_get_tar_commit_id },
|
||||
{ "grep", cmd_grep, RUN_SETUP_GENTLY },
|
||||
{ "hash-object", cmd_hash_object },
|
||||
{ "help", cmd_help },
|
||||
{ "index-pack", cmd_index_pack, RUN_SETUP_GENTLY },
|
||||
{ "init", cmd_init_db },
|
||||
{ "init-db", cmd_init_db },
|
||||
{ "log", cmd_log, RUN_SETUP },
|
||||
{ "ls-files", cmd_ls_files, RUN_SETUP },
|
||||
{ "ls-remote", cmd_ls_remote, RUN_SETUP_GENTLY },
|
||||
{ "ls-tree", cmd_ls_tree, RUN_SETUP },
|
||||
{ "mailinfo", cmd_mailinfo },
|
||||
{ "mailsplit", cmd_mailsplit },
|
||||
{ "merge", cmd_merge, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "merge-base", cmd_merge_base, RUN_SETUP },
|
||||
{ "merge-file", cmd_merge_file, RUN_SETUP_GENTLY },
|
||||
{ "merge-index", cmd_merge_index, RUN_SETUP },
|
||||
{ "merge-ours", cmd_merge_ours, RUN_SETUP },
|
||||
{ "merge-recursive", cmd_merge_recursive, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "merge-recursive-ours", cmd_merge_recursive, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "merge-recursive-theirs", cmd_merge_recursive, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "merge-subtree", cmd_merge_recursive, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "merge-tree", cmd_merge_tree, RUN_SETUP },
|
||||
{ "mktag", cmd_mktag, RUN_SETUP },
|
||||
{ "mktree", cmd_mktree, RUN_SETUP },
|
||||
{ "mv", cmd_mv, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "name-rev", cmd_name_rev, RUN_SETUP },
|
||||
{ "notes", cmd_notes, RUN_SETUP },
|
||||
{ "pack-objects", cmd_pack_objects, RUN_SETUP },
|
||||
{ "pack-redundant", cmd_pack_redundant, RUN_SETUP },
|
||||
{ "pack-refs", cmd_pack_refs, RUN_SETUP },
|
||||
{ "patch-id", cmd_patch_id },
|
||||
{ "peek-remote", cmd_ls_remote, RUN_SETUP_GENTLY },
|
||||
{ "pickaxe", cmd_blame, RUN_SETUP },
|
||||
{ "prune", cmd_prune, RUN_SETUP },
|
||||
{ "prune-packed", cmd_prune_packed, RUN_SETUP },
|
||||
{ "push", cmd_push, RUN_SETUP },
|
||||
{ "read-tree", cmd_read_tree, RUN_SETUP },
|
||||
{ "receive-pack", cmd_receive_pack },
|
||||
{ "reflog", cmd_reflog, RUN_SETUP },
|
||||
{ "remote", cmd_remote, RUN_SETUP },
|
||||
{ "remote-ext", cmd_remote_ext },
|
||||
{ "remote-fd", cmd_remote_fd },
|
||||
{ "replace", cmd_replace, RUN_SETUP },
|
||||
{ "repo-config", cmd_repo_config, RUN_SETUP_GENTLY },
|
||||
{ "rerere", cmd_rerere, RUN_SETUP },
|
||||
{ "reset", cmd_reset, RUN_SETUP },
|
||||
{ "rev-list", cmd_rev_list, RUN_SETUP },
|
||||
{ "rev-parse", cmd_rev_parse },
|
||||
{ "revert", cmd_revert, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "rm", cmd_rm, RUN_SETUP },
|
||||
{ "send-pack", cmd_send_pack, RUN_SETUP },
|
||||
{ "shortlog", cmd_shortlog, RUN_SETUP_GENTLY | USE_PAGER },
|
||||
{ "show", cmd_show, RUN_SETUP },
|
||||
{ "show-branch", cmd_show_branch, RUN_SETUP },
|
||||
{ "show-ref", cmd_show_ref, RUN_SETUP },
|
||||
{ "stage", cmd_add, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "status", cmd_status, RUN_SETUP | NEED_WORK_TREE },
|
||||
{ "stripspace", cmd_stripspace },
|
||||
{ "symbolic-ref", cmd_symbolic_ref, RUN_SETUP },
|
||||
{ "tag", cmd_tag, RUN_SETUP },
|
||||
{ "tar-tree", cmd_tar_tree },
|
||||
{ "unpack-file", cmd_unpack_file, RUN_SETUP },
|
||||
{ "unpack-objects", cmd_unpack_objects, RUN_SETUP },
|
||||
{ "update-index", cmd_update_index, RUN_SETUP },
|
||||
{ "update-ref", cmd_update_ref, RUN_SETUP },
|
||||
{ "update-server-info", cmd_update_server_info, RUN_SETUP },
|
||||
{ "upload-archive", cmd_upload_archive },
|
||||
{ "upload-archive--writer", cmd_upload_archive_writer },
|
||||
{ "var", cmd_var, RUN_SETUP_GENTLY },
|
||||
{ "verify-pack", cmd_verify_pack },
|
||||
{ "verify-tag", cmd_verify_tag, RUN_SETUP },
|
||||
{ "version", cmd_version },
|
||||
{ "whatchanged", cmd_whatchanged, RUN_SETUP },
|
||||
{ "write-tree", cmd_write_tree, RUN_SETUP },
|
||||
};
|
||||
int i;
|
||||
static const char ext[] = STRIP_EXTENSION;
|
||||
|
||||
if (sizeof(ext) > 1) {
|
||||
i = strlen(argv[0]) - strlen(ext);
|
||||
if (i > 0 && !strcmp(argv[0] + i, ext)) {
|
||||
char *argv0 = xstrdup(argv[0]);
|
||||
argv[0] = cmd = argv0;
|
||||
argv0[i] = '\0';
|
||||
}
|
||||
}
|
||||
|
||||
/* Turn "git cmd --help" into "git help cmd" */
|
||||
if (argc > 1 && !strcmp(argv[1], "--help")) {
|
||||
argv[1] = argv[0];
|
||||
argv[0] = cmd = "help";
|
||||
}
|
||||
|
||||
for (i = 0; i < ARRAY_SIZE(commands); i++) {
|
||||
struct cmd_struct *p = commands+i;
|
||||
if (strcmp(p->cmd, cmd))
|
||||
continue;
|
||||
exit(run_builtin(p, argc, argv));
|
||||
}
|
||||
}
|
||||
|
||||
static void execv_dashed_external(const char **argv)
|
||||
{
|
||||
struct strbuf cmd = STRBUF_INIT;
|
||||
const char *tmp;
|
||||
int status;
|
||||
|
||||
if (use_pager == -1)
|
||||
use_pager = check_pager_config(argv[0]);
|
||||
commit_pager_choice();
|
||||
|
||||
strbuf_addf(&cmd, "git-%s", argv[0]);
|
||||
|
||||
/*
|
||||
* argv[0] must be the git command, but the argv array
|
||||
* belongs to the caller, and may be reused in
|
||||
* subsequent loop iterations. Save argv[0] and
|
||||
* restore it on error.
|
||||
*/
|
||||
tmp = argv[0];
|
||||
argv[0] = cmd.buf;
|
||||
|
||||
trace_argv_printf(argv, "trace: exec:");
|
||||
|
||||
/*
|
||||
* if we fail because the command is not found, it is
|
||||
* OK to return. Otherwise, we just pass along the status code.
|
||||
*/
|
||||
status = run_command_v_opt(argv, RUN_SILENT_EXEC_FAILURE | RUN_CLEAN_ON_EXIT);
|
||||
if (status >= 0 || errno != ENOENT)
|
||||
exit(status);
|
||||
|
||||
argv[0] = tmp;
|
||||
|
||||
strbuf_release(&cmd);
|
||||
}
|
||||
|
||||
static int run_argv(int *argcp, const char ***argv)
|
||||
{
|
||||
int done_alias = 0;
|
||||
|
||||
while (1) {
|
||||
/* See if it's an internal command */
|
||||
handle_internal_command(*argcp, *argv);
|
||||
|
||||
/* .. then try the external ones */
|
||||
execv_dashed_external(*argv);
|
||||
|
||||
/* It could be an alias -- this works around the insanity
|
||||
* of overriding "git log" with "git show" by having
|
||||
* alias.log = show
|
||||
*/
|
||||
if (done_alias || !handle_alias(argcp, argv))
|
||||
break;
|
||||
done_alias = 1;
|
||||
}
|
||||
|
||||
return done_alias;
|
||||
}
|
||||
|
||||
|
||||
int main(int argc, const char **argv)
|
||||
{
|
||||
const char *cmd;
|
||||
|
||||
startup_info = &git_startup_info;
|
||||
|
||||
cmd = git_extract_argv0_path(argv[0]);
|
||||
if (!cmd)
|
||||
cmd = "git-help";
|
||||
|
||||
git_setup_gettext();
|
||||
|
||||
/*
|
||||
* "git-xxxx" is the same as "git xxxx", but we obviously:
|
||||
*
|
||||
* - cannot take flags in between the "git" and the "xxxx".
|
||||
* - cannot execute it externally (since it would just do
|
||||
* the same thing over again)
|
||||
*
|
||||
* So we just directly call the internal command handler, and
|
||||
* die if that one cannot handle it.
|
||||
*/
|
||||
if (!prefixcmp(cmd, "git-")) {
|
||||
cmd += 4;
|
||||
argv[0] = cmd;
|
||||
handle_internal_command(argc, argv);
|
||||
die("cannot handle %s internally", cmd);
|
||||
}
|
||||
|
||||
/* Look for flags.. */
|
||||
argv++;
|
||||
argc--;
|
||||
handle_options(&argv, &argc, NULL);
|
||||
if (argc > 0) {
|
||||
if (!prefixcmp(argv[0], "--"))
|
||||
argv[0] += 2;
|
||||
} else {
|
||||
/* The user didn't specify a command; give them help */
|
||||
commit_pager_choice();
|
||||
printf("usage: %s\n\n", git_usage_string);
|
||||
list_common_cmds_help();
|
||||
printf("\n%s\n", git_more_info_string);
|
||||
exit(1);
|
||||
}
|
||||
cmd = argv[0];
|
||||
|
||||
/*
|
||||
* We use PATH to find git commands, but we prepend some higher
|
||||
* precedence paths: the "--exec-path" option, the GIT_EXEC_PATH
|
||||
* environment, and the $(gitexecdir) from the Makefile at build
|
||||
* time.
|
||||
*/
|
||||
setup_path();
|
||||
|
||||
while (1) {
|
||||
static int done_help = 0;
|
||||
static int was_alias = 0;
|
||||
was_alias = run_argv(&argc, &argv);
|
||||
if (errno != ENOENT)
|
||||
break;
|
||||
if (was_alias) {
|
||||
fprintf(stderr, "Expansion of alias '%s' failed; "
|
||||
"'%s' is not a git command\n",
|
||||
cmd, argv[0]);
|
||||
exit(1);
|
||||
}
|
||||
if (!done_help) {
|
||||
cmd = argv[0] = help_unknown_cmd(cmd);
|
||||
done_help = 1;
|
||||
} else
|
||||
break;
|
||||
}
|
||||
|
||||
fprintf(stderr, "Failed to run command '%s': %s\n",
|
||||
cmd, strerror(errno));
|
||||
|
||||
return 1;
|
||||
}
|
||||
74
test/fixtures/c/hash.c
vendored
Normal file
74
test/fixtures/c/hash.c
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
/*
|
||||
* Copyright (C) 2009-2012 the libgit2 contributors
|
||||
*
|
||||
* This file is part of libgit2, distributed under the GNU GPL v2 with
|
||||
* a Linking Exception. For full terms see the included COPYING file.
|
||||
*/
|
||||
|
||||
#include "common.h"
|
||||
#include "hash.h"
|
||||
|
||||
#if defined(PPC_SHA1)
|
||||
# include "ppc/sha1.h"
|
||||
#else
|
||||
# include "sha1.h"
|
||||
#endif
|
||||
|
||||
struct git_hash_ctx {
|
||||
SHA_CTX c;
|
||||
};
|
||||
|
||||
git_hash_ctx *git_hash_new_ctx(void)
|
||||
{
|
||||
git_hash_ctx *ctx = git__malloc(sizeof(*ctx));
|
||||
|
||||
if (!ctx)
|
||||
return NULL;
|
||||
|
||||
SHA1_Init(&ctx->c);
|
||||
|
||||
return ctx;
|
||||
}
|
||||
|
||||
void git_hash_free_ctx(git_hash_ctx *ctx)
|
||||
{
|
||||
git__free(ctx);
|
||||
}
|
||||
|
||||
void git_hash_init(git_hash_ctx *ctx)
|
||||
{
|
||||
assert(ctx);
|
||||
SHA1_Init(&ctx->c);
|
||||
}
|
||||
|
||||
void git_hash_update(git_hash_ctx *ctx, const void *data, size_t len)
|
||||
{
|
||||
assert(ctx);
|
||||
SHA1_Update(&ctx->c, data, len);
|
||||
}
|
||||
|
||||
void git_hash_final(git_oid *out, git_hash_ctx *ctx)
|
||||
{
|
||||
assert(ctx);
|
||||
SHA1_Final(out->id, &ctx->c);
|
||||
}
|
||||
|
||||
void git_hash_buf(git_oid *out, const void *data, size_t len)
|
||||
{
|
||||
SHA_CTX c;
|
||||
|
||||
SHA1_Init(&c);
|
||||
SHA1_Update(&c, data, len);
|
||||
SHA1_Final(out->id, &c);
|
||||
}
|
||||
|
||||
void git_hash_vec(git_oid *out, git_buf_vec *vec, size_t n)
|
||||
{
|
||||
SHA_CTX c;
|
||||
size_t i;
|
||||
|
||||
SHA1_Init(&c);
|
||||
for (i = 0; i < n; i++)
|
||||
SHA1_Update(&c, vec[i].data, vec[i].len);
|
||||
SHA1_Final(out->id, &c);
|
||||
}
|
||||
2059
test/fixtures/c/http_parser.c
vendored
Normal file
2059
test/fixtures/c/http_parser.c
vendored
Normal file
File diff suppressed because it is too large
Load Diff
318
test/fixtures/c/http_parser.h
vendored
Normal file
318
test/fixtures/c/http_parser.h
vendored
Normal file
@@ -0,0 +1,318 @@
|
||||
/* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
#ifndef http_parser_h
|
||||
#define http_parser_h
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#define HTTP_PARSER_VERSION_MAJOR 1
|
||||
#define HTTP_PARSER_VERSION_MINOR 0
|
||||
|
||||
#include <sys/types.h>
|
||||
#if defined(_WIN32) && !defined(__MINGW32__) && (!defined(_MSC_VER) || _MSC_VER<1600)
|
||||
typedef __int8 int8_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef __int16 int16_t;
|
||||
typedef unsigned __int16 uint16_t;
|
||||
typedef __int32 int32_t;
|
||||
typedef unsigned __int32 uint32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
|
||||
typedef unsigned int size_t;
|
||||
typedef int ssize_t;
|
||||
#else
|
||||
#include <stdint.h>
|
||||
#endif
|
||||
|
||||
/* Compile with -DHTTP_PARSER_STRICT=0 to make less checks, but run
|
||||
* faster
|
||||
*/
|
||||
#ifndef HTTP_PARSER_STRICT
|
||||
# define HTTP_PARSER_STRICT 1
|
||||
#endif
|
||||
|
||||
/* Compile with -DHTTP_PARSER_DEBUG=1 to add extra debugging information to
|
||||
* the error reporting facility.
|
||||
*/
|
||||
#ifndef HTTP_PARSER_DEBUG
|
||||
# define HTTP_PARSER_DEBUG 0
|
||||
#endif
|
||||
|
||||
|
||||
/* Maximium header size allowed */
|
||||
#define HTTP_MAX_HEADER_SIZE (80*1024)
|
||||
|
||||
|
||||
typedef struct http_parser http_parser;
|
||||
typedef struct http_parser_settings http_parser_settings;
|
||||
|
||||
|
||||
/* Callbacks should return non-zero to indicate an error. The parser will
|
||||
* then halt execution.
|
||||
*
|
||||
* The one exception is on_headers_complete. In a HTTP_RESPONSE parser
|
||||
* returning '1' from on_headers_complete will tell the parser that it
|
||||
* should not expect a body. This is used when receiving a response to a
|
||||
* HEAD request which may contain 'Content-Length' or 'Transfer-Encoding:
|
||||
* chunked' headers that indicate the presence of a body.
|
||||
*
|
||||
* http_data_cb does not return data chunks. It will be call arbitrarally
|
||||
* many times for each string. E.G. you might get 10 callbacks for "on_path"
|
||||
* each providing just a few characters more data.
|
||||
*/
|
||||
typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);
|
||||
typedef int (*http_cb) (http_parser*);
|
||||
|
||||
|
||||
/* Request Methods */
|
||||
#define HTTP_METHOD_MAP(XX) \
|
||||
XX(0, DELETE, DELETE) \
|
||||
XX(1, GET, GET) \
|
||||
XX(2, HEAD, HEAD) \
|
||||
XX(3, POST, POST) \
|
||||
XX(4, PUT, PUT) \
|
||||
/* pathological */ \
|
||||
XX(5, CONNECT, CONNECT) \
|
||||
XX(6, OPTIONS, OPTIONS) \
|
||||
XX(7, TRACE, TRACE) \
|
||||
/* webdav */ \
|
||||
XX(8, COPY, COPY) \
|
||||
XX(9, LOCK, LOCK) \
|
||||
XX(10, MKCOL, MKCOL) \
|
||||
XX(11, MOVE, MOVE) \
|
||||
XX(12, PROPFIND, PROPFIND) \
|
||||
XX(13, PROPPATCH, PROPPATCH) \
|
||||
XX(14, SEARCH, SEARCH) \
|
||||
XX(15, UNLOCK, UNLOCK) \
|
||||
/* subversion */ \
|
||||
XX(16, REPORT, REPORT) \
|
||||
XX(17, MKACTIVITY, MKACTIVITY) \
|
||||
XX(18, CHECKOUT, CHECKOUT) \
|
||||
XX(19, MERGE, MERGE) \
|
||||
/* upnp */ \
|
||||
XX(20, MSEARCH, M-SEARCH) \
|
||||
XX(21, NOTIFY, NOTIFY) \
|
||||
XX(22, SUBSCRIBE, SUBSCRIBE) \
|
||||
XX(23, UNSUBSCRIBE, UNSUBSCRIBE) \
|
||||
/* RFC-5789 */ \
|
||||
XX(24, PATCH, PATCH) \
|
||||
XX(25, PURGE, PURGE) \
|
||||
|
||||
enum http_method
|
||||
{
|
||||
#define XX(num, name, string) HTTP_##name = num,
|
||||
HTTP_METHOD_MAP(XX)
|
||||
#undef XX
|
||||
};
|
||||
|
||||
|
||||
enum http_parser_type { HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH };
|
||||
|
||||
|
||||
/* Flag values for http_parser.flags field */
|
||||
enum flags
|
||||
{ F_CHUNKED = 1 << 0
|
||||
, F_CONNECTION_KEEP_ALIVE = 1 << 1
|
||||
, F_CONNECTION_CLOSE = 1 << 2
|
||||
, F_TRAILING = 1 << 3
|
||||
, F_UPGRADE = 1 << 4
|
||||
, F_SKIPBODY = 1 << 5
|
||||
};
|
||||
|
||||
|
||||
/* Map for errno-related constants
|
||||
*
|
||||
* The provided argument should be a macro that takes 2 arguments.
|
||||
*/
|
||||
#define HTTP_ERRNO_MAP(XX) \
|
||||
/* No error */ \
|
||||
XX(OK, "success") \
|
||||
\
|
||||
/* Callback-related errors */ \
|
||||
XX(CB_message_begin, "the on_message_begin callback failed") \
|
||||
XX(CB_url, "the on_url callback failed") \
|
||||
XX(CB_header_field, "the on_header_field callback failed") \
|
||||
XX(CB_header_value, "the on_header_value callback failed") \
|
||||
XX(CB_headers_complete, "the on_headers_complete callback failed") \
|
||||
XX(CB_body, "the on_body callback failed") \
|
||||
XX(CB_message_complete, "the on_message_complete callback failed") \
|
||||
\
|
||||
/* Parsing-related errors */ \
|
||||
XX(INVALID_EOF_STATE, "stream ended at an unexpected time") \
|
||||
XX(HEADER_OVERFLOW, \
|
||||
"too many header bytes seen; overflow detected") \
|
||||
XX(CLOSED_CONNECTION, \
|
||||
"data received after completed connection: close message") \
|
||||
XX(INVALID_VERSION, "invalid HTTP version") \
|
||||
XX(INVALID_STATUS, "invalid HTTP status code") \
|
||||
XX(INVALID_METHOD, "invalid HTTP method") \
|
||||
XX(INVALID_URL, "invalid URL") \
|
||||
XX(INVALID_HOST, "invalid host") \
|
||||
XX(INVALID_PORT, "invalid port") \
|
||||
XX(INVALID_PATH, "invalid path") \
|
||||
XX(INVALID_QUERY_STRING, "invalid query string") \
|
||||
XX(INVALID_FRAGMENT, "invalid fragment") \
|
||||
XX(LF_EXPECTED, "LF character expected") \
|
||||
XX(INVALID_HEADER_TOKEN, "invalid character in header") \
|
||||
XX(INVALID_CONTENT_LENGTH, \
|
||||
"invalid character in content-length header") \
|
||||
XX(INVALID_CHUNK_SIZE, \
|
||||
"invalid character in chunk size header") \
|
||||
XX(INVALID_CONSTANT, "invalid constant string") \
|
||||
XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\
|
||||
XX(STRICT, "strict mode assertion failed") \
|
||||
XX(PAUSED, "parser is paused") \
|
||||
XX(UNKNOWN, "an unknown error occurred")
|
||||
|
||||
|
||||
/* Define HPE_* values for each errno value above */
|
||||
#define HTTP_ERRNO_GEN(n, s) HPE_##n,
|
||||
enum http_errno {
|
||||
HTTP_ERRNO_MAP(HTTP_ERRNO_GEN)
|
||||
};
|
||||
#undef HTTP_ERRNO_GEN
|
||||
|
||||
|
||||
/* Get an http_errno value from an http_parser */
|
||||
#define HTTP_PARSER_ERRNO(p) ((enum http_errno) (p)->http_errno)
|
||||
|
||||
/* Get the line number that generated the current error */
|
||||
#if HTTP_PARSER_DEBUG
|
||||
#define HTTP_PARSER_ERRNO_LINE(p) ((p)->error_lineno)
|
||||
#else
|
||||
#define HTTP_PARSER_ERRNO_LINE(p) 0
|
||||
#endif
|
||||
|
||||
|
||||
struct http_parser {
|
||||
/** PRIVATE **/
|
||||
unsigned char type : 2; /* enum http_parser_type */
|
||||
unsigned char flags : 6; /* F_* values from 'flags' enum; semi-public */
|
||||
unsigned char state; /* enum state from http_parser.c */
|
||||
unsigned char header_state; /* enum header_state from http_parser.c */
|
||||
unsigned char index; /* index into current matcher */
|
||||
|
||||
uint32_t nread; /* # bytes read in various scenarios */
|
||||
uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */
|
||||
|
||||
/** READ-ONLY **/
|
||||
unsigned short http_major;
|
||||
unsigned short http_minor;
|
||||
unsigned short status_code; /* responses only */
|
||||
unsigned char method; /* requests only */
|
||||
unsigned char http_errno : 7;
|
||||
|
||||
/* 1 = Upgrade header was present and the parser has exited because of that.
|
||||
* 0 = No upgrade header present.
|
||||
* Should be checked when http_parser_execute() returns in addition to
|
||||
* error checking.
|
||||
*/
|
||||
unsigned char upgrade : 1;
|
||||
|
||||
#if HTTP_PARSER_DEBUG
|
||||
uint32_t error_lineno;
|
||||
#endif
|
||||
|
||||
/** PUBLIC **/
|
||||
void *data; /* A pointer to get hook to the "connection" or "socket" object */
|
||||
};
|
||||
|
||||
|
||||
struct http_parser_settings {
|
||||
http_cb on_message_begin;
|
||||
http_data_cb on_url;
|
||||
http_data_cb on_header_field;
|
||||
http_data_cb on_header_value;
|
||||
http_cb on_headers_complete;
|
||||
http_data_cb on_body;
|
||||
http_cb on_message_complete;
|
||||
};
|
||||
|
||||
|
||||
enum http_parser_url_fields
|
||||
{ UF_SCHEMA = 0
|
||||
, UF_HOST = 1
|
||||
, UF_PORT = 2
|
||||
, UF_PATH = 3
|
||||
, UF_QUERY = 4
|
||||
, UF_FRAGMENT = 5
|
||||
, UF_MAX = 6
|
||||
};
|
||||
|
||||
|
||||
/* Result structure for http_parser_parse_url().
|
||||
*
|
||||
* Callers should index into field_data[] with UF_* values iff field_set
|
||||
* has the relevant (1 << UF_*) bit set. As a courtesy to clients (and
|
||||
* because we probably have padding left over), we convert any port to
|
||||
* a uint16_t.
|
||||
*/
|
||||
struct http_parser_url {
|
||||
uint16_t field_set; /* Bitmask of (1 << UF_*) values */
|
||||
uint16_t port; /* Converted UF_PORT string */
|
||||
|
||||
struct {
|
||||
uint16_t off; /* Offset into buffer in which field starts */
|
||||
uint16_t len; /* Length of run in buffer */
|
||||
} field_data[UF_MAX];
|
||||
};
|
||||
|
||||
|
||||
void http_parser_init(http_parser *parser, enum http_parser_type type);
|
||||
|
||||
|
||||
size_t http_parser_execute(http_parser *parser,
|
||||
const http_parser_settings *settings,
|
||||
const char *data,
|
||||
size_t len);
|
||||
|
||||
|
||||
/* If http_should_keep_alive() in the on_headers_complete or
|
||||
* on_message_complete callback returns true, then this will be should be
|
||||
* the last message on the connection.
|
||||
* If you are the server, respond with the "Connection: close" header.
|
||||
* If you are the client, close the connection.
|
||||
*/
|
||||
int http_should_keep_alive(http_parser *parser);
|
||||
|
||||
/* Returns a string version of the HTTP method. */
|
||||
const char *http_method_str(enum http_method m);
|
||||
|
||||
/* Return a string name of the given error */
|
||||
const char *http_errno_name(enum http_errno err);
|
||||
|
||||
/* Return a string description of the given error */
|
||||
const char *http_errno_description(enum http_errno err);
|
||||
|
||||
/* Parse a URL; return nonzero on failure */
|
||||
int http_parser_parse_url(const char *buf, size_t buflen,
|
||||
int is_connect,
|
||||
struct http_parser_url *u);
|
||||
|
||||
/* Pause or un-pause the parser; a nonzero value pauses */
|
||||
void http_parser_pause(http_parser *parser, int paused);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
2551
test/fixtures/c/markdown.c
vendored
Normal file
2551
test/fixtures/c/markdown.c
vendored
Normal file
File diff suppressed because it is too large
Load Diff
462
test/fixtures/c/process.c
vendored
Normal file
462
test/fixtures/c/process.c
vendored
Normal file
@@ -0,0 +1,462 @@
|
||||
/* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
#include "uv.h"
|
||||
#include "internal.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <errno.h>
|
||||
#include <sys/wait.h>
|
||||
#include <poll.h>
|
||||
#include <unistd.h>
|
||||
#include <stdio.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
#ifdef __APPLE__
|
||||
# include <TargetConditionals.h>
|
||||
#endif
|
||||
|
||||
#if defined(__APPLE__) && !TARGET_OS_IPHONE
|
||||
# include <crt_externs.h>
|
||||
# define environ (*_NSGetEnviron())
|
||||
#else
|
||||
extern char **environ;
|
||||
#endif
|
||||
|
||||
|
||||
static void uv__chld(EV_P_ ev_child* watcher, int revents) {
|
||||
int status = watcher->rstatus;
|
||||
int exit_status = 0;
|
||||
int term_signal = 0;
|
||||
uv_process_t *process = watcher->data;
|
||||
|
||||
assert(&process->child_watcher == watcher);
|
||||
assert(revents & EV_CHILD);
|
||||
|
||||
ev_child_stop(EV_A_ &process->child_watcher);
|
||||
|
||||
if (WIFEXITED(status)) {
|
||||
exit_status = WEXITSTATUS(status);
|
||||
}
|
||||
|
||||
if (WIFSIGNALED(status)) {
|
||||
term_signal = WTERMSIG(status);
|
||||
}
|
||||
|
||||
if (process->exit_cb) {
|
||||
process->exit_cb(process, exit_status, term_signal);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int uv__make_socketpair(int fds[2], int flags) {
|
||||
#ifdef SOCK_NONBLOCK
|
||||
int fl;
|
||||
|
||||
fl = SOCK_CLOEXEC;
|
||||
|
||||
if (flags & UV__F_NONBLOCK)
|
||||
fl |= SOCK_NONBLOCK;
|
||||
|
||||
if (socketpair(AF_UNIX, SOCK_STREAM|fl, 0, fds) == 0)
|
||||
return 0;
|
||||
|
||||
if (errno != EINVAL)
|
||||
return -1;
|
||||
|
||||
/* errno == EINVAL so maybe the kernel headers lied about
|
||||
* the availability of SOCK_NONBLOCK. This can happen if people
|
||||
* build libuv against newer kernel headers than the kernel
|
||||
* they actually run the software on.
|
||||
*/
|
||||
#endif
|
||||
|
||||
if (socketpair(AF_UNIX, SOCK_STREAM, 0, fds))
|
||||
return -1;
|
||||
|
||||
uv__cloexec(fds[0], 1);
|
||||
uv__cloexec(fds[1], 1);
|
||||
|
||||
if (flags & UV__F_NONBLOCK) {
|
||||
uv__nonblock(fds[0], 1);
|
||||
uv__nonblock(fds[1], 1);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
int uv__make_pipe(int fds[2], int flags) {
|
||||
#if __linux__
|
||||
int fl;
|
||||
|
||||
fl = UV__O_CLOEXEC;
|
||||
|
||||
if (flags & UV__F_NONBLOCK)
|
||||
fl |= UV__O_NONBLOCK;
|
||||
|
||||
if (uv__pipe2(fds, fl) == 0)
|
||||
return 0;
|
||||
|
||||
if (errno != ENOSYS)
|
||||
return -1;
|
||||
#endif
|
||||
|
||||
if (pipe(fds))
|
||||
return -1;
|
||||
|
||||
uv__cloexec(fds[0], 1);
|
||||
uv__cloexec(fds[1], 1);
|
||||
|
||||
if (flags & UV__F_NONBLOCK) {
|
||||
uv__nonblock(fds[0], 1);
|
||||
uv__nonblock(fds[1], 1);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Used for initializing stdio streams like options.stdin_stream. Returns
|
||||
* zero on success.
|
||||
*/
|
||||
static int uv__process_init_stdio(uv_stdio_container_t* container, int fds[2],
|
||||
int writable) {
|
||||
int fd = -1;
|
||||
switch (container->flags & (UV_IGNORE | UV_CREATE_PIPE | UV_INHERIT_FD |
|
||||
UV_INHERIT_STREAM)) {
|
||||
case UV_IGNORE:
|
||||
return 0;
|
||||
case UV_CREATE_PIPE:
|
||||
assert(container->data.stream != NULL);
|
||||
|
||||
if (container->data.stream->type != UV_NAMED_PIPE) {
|
||||
errno = EINVAL;
|
||||
return -1;
|
||||
}
|
||||
|
||||
return uv__make_socketpair(fds, 0);
|
||||
case UV_INHERIT_FD:
|
||||
case UV_INHERIT_STREAM:
|
||||
if (container->flags & UV_INHERIT_FD) {
|
||||
fd = container->data.fd;
|
||||
} else {
|
||||
fd = container->data.stream->fd;
|
||||
}
|
||||
|
||||
if (fd == -1) {
|
||||
errno = EINVAL;
|
||||
return -1;
|
||||
}
|
||||
|
||||
fds[writable ? 1 : 0] = fd;
|
||||
|
||||
return 0;
|
||||
default:
|
||||
assert(0 && "Unexpected flags");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static int uv__process_stdio_flags(uv_stdio_container_t* container,
|
||||
int writable) {
|
||||
if (container->data.stream->type == UV_NAMED_PIPE &&
|
||||
((uv_pipe_t*)container->data.stream)->ipc) {
|
||||
return UV_STREAM_READABLE | UV_STREAM_WRITABLE;
|
||||
} else if (writable) {
|
||||
return UV_STREAM_WRITABLE;
|
||||
} else {
|
||||
return UV_STREAM_READABLE;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static int uv__process_open_stream(uv_stdio_container_t* container, int fds[2],
|
||||
int writable) {
|
||||
int fd = fds[writable ? 1 : 0];
|
||||
int child_fd = fds[writable ? 0 : 1];
|
||||
int flags;
|
||||
|
||||
/* No need to create stream */
|
||||
if (!(container->flags & UV_CREATE_PIPE) || fd < 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
assert(child_fd >= 0);
|
||||
close(child_fd);
|
||||
|
||||
uv__nonblock(fd, 1);
|
||||
flags = uv__process_stdio_flags(container, writable);
|
||||
|
||||
return uv__stream_open((uv_stream_t*)container->data.stream, fd, flags);
|
||||
}
|
||||
|
||||
|
||||
static void uv__process_close_stream(uv_stdio_container_t* container) {
|
||||
if (!(container->flags & UV_CREATE_PIPE)) return;
|
||||
|
||||
uv__stream_close((uv_stream_t*)container->data.stream);
|
||||
}
|
||||
|
||||
|
||||
static void uv__process_child_init(uv_process_options_t options,
|
||||
int stdio_count,
|
||||
int* pipes) {
|
||||
int i;
|
||||
|
||||
if (options.flags & UV_PROCESS_DETACHED) {
|
||||
setsid();
|
||||
}
|
||||
|
||||
/* Dup fds */
|
||||
for (i = 0; i < stdio_count; i++) {
|
||||
/*
|
||||
* stdin has swapped ends of pipe
|
||||
* (it's the only one readable stream)
|
||||
*/
|
||||
int close_fd = i == 0 ? pipes[i * 2 + 1] : pipes[i * 2];
|
||||
int use_fd = i == 0 ? pipes[i * 2] : pipes[i * 2 + 1];
|
||||
|
||||
if (use_fd >= 0) {
|
||||
close(close_fd);
|
||||
} else if (i < 3) {
|
||||
/* `/dev/null` stdin, stdout, stderr even if they've flag UV_IGNORE */
|
||||
use_fd = open("/dev/null", i == 0 ? O_RDONLY : O_RDWR);
|
||||
|
||||
if (use_fd < 0) {
|
||||
perror("failed to open stdio");
|
||||
_exit(127);
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (i != use_fd) {
|
||||
dup2(use_fd, i);
|
||||
close(use_fd);
|
||||
}
|
||||
}
|
||||
|
||||
if (options.cwd && chdir(options.cwd)) {
|
||||
perror("chdir()");
|
||||
_exit(127);
|
||||
}
|
||||
|
||||
if ((options.flags & UV_PROCESS_SETGID) && setgid(options.gid)) {
|
||||
perror("setgid()");
|
||||
_exit(127);
|
||||
}
|
||||
|
||||
if ((options.flags & UV_PROCESS_SETUID) && setuid(options.uid)) {
|
||||
perror("setuid()");
|
||||
_exit(127);
|
||||
}
|
||||
|
||||
environ = options.env;
|
||||
|
||||
execvp(options.file, options.args);
|
||||
perror("execvp()");
|
||||
_exit(127);
|
||||
}
|
||||
|
||||
|
||||
#ifndef SPAWN_WAIT_EXEC
|
||||
# define SPAWN_WAIT_EXEC 1
|
||||
#endif
|
||||
|
||||
int uv_spawn(uv_loop_t* loop, uv_process_t* process,
|
||||
uv_process_options_t options) {
|
||||
/*
|
||||
* Save environ in the case that we get it clobbered
|
||||
* by the child process.
|
||||
*/
|
||||
char** save_our_env = environ;
|
||||
|
||||
int stdio_count = options.stdio_count < 3 ? 3 : options.stdio_count;
|
||||
int* pipes = malloc(2 * stdio_count * sizeof(int));
|
||||
|
||||
#if SPAWN_WAIT_EXEC
|
||||
int signal_pipe[2] = { -1, -1 };
|
||||
struct pollfd pfd;
|
||||
#endif
|
||||
int status;
|
||||
pid_t pid;
|
||||
int i;
|
||||
|
||||
if (pipes == NULL) {
|
||||
errno = ENOMEM;
|
||||
goto error;
|
||||
}
|
||||
|
||||
assert(options.file != NULL);
|
||||
assert(!(options.flags & ~(UV_PROCESS_WINDOWS_VERBATIM_ARGUMENTS |
|
||||
UV_PROCESS_DETACHED |
|
||||
UV_PROCESS_SETGID |
|
||||
UV_PROCESS_SETUID)));
|
||||
|
||||
|
||||
uv__handle_init(loop, (uv_handle_t*)process, UV_PROCESS);
|
||||
loop->counters.process_init++;
|
||||
uv__handle_start(process);
|
||||
|
||||
process->exit_cb = options.exit_cb;
|
||||
|
||||
/* Init pipe pairs */
|
||||
for (i = 0; i < stdio_count; i++) {
|
||||
pipes[i * 2] = -1;
|
||||
pipes[i * 2 + 1] = -1;
|
||||
}
|
||||
|
||||
/* Create socketpairs/pipes, or use raw fd */
|
||||
for (i = 0; i < options.stdio_count; i++) {
|
||||
if (uv__process_init_stdio(&options.stdio[i], pipes + i * 2, i != 0)) {
|
||||
goto error;
|
||||
}
|
||||
}
|
||||
|
||||
/* This pipe is used by the parent to wait until
|
||||
* the child has called `execve()`. We need this
|
||||
* to avoid the following race condition:
|
||||
*
|
||||
* if ((pid = fork()) > 0) {
|
||||
* kill(pid, SIGTERM);
|
||||
* }
|
||||
* else if (pid == 0) {
|
||||
* execve("/bin/cat", argp, envp);
|
||||
* }
|
||||
*
|
||||
* The parent sends a signal immediately after forking.
|
||||
* Since the child may not have called `execve()` yet,
|
||||
* there is no telling what process receives the signal,
|
||||
* our fork or /bin/cat.
|
||||
*
|
||||
* To avoid ambiguity, we create a pipe with both ends
|
||||
* marked close-on-exec. Then, after the call to `fork()`,
|
||||
* the parent polls the read end until it sees POLLHUP.
|
||||
*/
|
||||
#if SPAWN_WAIT_EXEC
|
||||
if (uv__make_pipe(signal_pipe, UV__F_NONBLOCK))
|
||||
goto error;
|
||||
#endif
|
||||
|
||||
pid = fork();
|
||||
|
||||
if (pid == -1) {
|
||||
#if SPAWN_WAIT_EXEC
|
||||
close(signal_pipe[0]);
|
||||
close(signal_pipe[1]);
|
||||
#endif
|
||||
environ = save_our_env;
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (pid == 0) {
|
||||
/* Child */
|
||||
uv__process_child_init(options, stdio_count, pipes);
|
||||
|
||||
/* Execution never reaches here. */
|
||||
}
|
||||
|
||||
/* Parent. */
|
||||
|
||||
/* Restore environment. */
|
||||
environ = save_our_env;
|
||||
|
||||
#if SPAWN_WAIT_EXEC
|
||||
/* POLLHUP signals child has exited or execve()'d. */
|
||||
close(signal_pipe[1]);
|
||||
do {
|
||||
pfd.fd = signal_pipe[0];
|
||||
pfd.events = POLLIN|POLLHUP;
|
||||
pfd.revents = 0;
|
||||
errno = 0, status = poll(&pfd, 1, -1);
|
||||
}
|
||||
while (status == -1 && (errno == EINTR || errno == ENOMEM));
|
||||
|
||||
assert((status == 1) && "poll() on pipe read end failed");
|
||||
close(signal_pipe[0]);
|
||||
#endif
|
||||
|
||||
process->pid = pid;
|
||||
|
||||
ev_child_init(&process->child_watcher, uv__chld, pid, 0);
|
||||
ev_child_start(process->loop->ev, &process->child_watcher);
|
||||
process->child_watcher.data = process;
|
||||
|
||||
for (i = 0; i < options.stdio_count; i++) {
|
||||
if (uv__process_open_stream(&options.stdio[i], pipes + i * 2, i == 0)) {
|
||||
int j;
|
||||
/* Close all opened streams */
|
||||
for (j = 0; j < i; j++) {
|
||||
uv__process_close_stream(&options.stdio[j]);
|
||||
}
|
||||
|
||||
goto error;
|
||||
}
|
||||
}
|
||||
|
||||
free(pipes);
|
||||
|
||||
return 0;
|
||||
|
||||
error:
|
||||
uv__set_sys_error(process->loop, errno);
|
||||
|
||||
for (i = 0; i < stdio_count; i++) {
|
||||
close(pipes[i * 2]);
|
||||
close(pipes[i * 2 + 1]);
|
||||
}
|
||||
|
||||
free(pipes);
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
int uv_process_kill(uv_process_t* process, int signum) {
|
||||
int r = kill(process->pid, signum);
|
||||
|
||||
if (r) {
|
||||
uv__set_sys_error(process->loop, errno);
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
uv_err_t uv_kill(int pid, int signum) {
|
||||
int r = kill(pid, signum);
|
||||
|
||||
if (r) {
|
||||
return uv__new_sys_error(errno);
|
||||
} else {
|
||||
return uv_ok_;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void uv__process_close(uv_process_t* handle) {
|
||||
ev_child_stop(handle->loop->ev, &handle->child_watcher);
|
||||
uv__handle_stop(handle);
|
||||
}
|
||||
129
test/fixtures/c/rdiscount.c
vendored
Normal file
129
test/fixtures/c/rdiscount.c
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
#include <stdio.h>
|
||||
#include "ruby.h"
|
||||
#include "mkdio.h"
|
||||
|
||||
static VALUE rb_cRDiscount;
|
||||
|
||||
static VALUE
|
||||
rb_rdiscount_to_html(int argc, VALUE *argv, VALUE self)
|
||||
{
|
||||
/* grab char pointer to markdown input text */
|
||||
char *res;
|
||||
int szres;
|
||||
VALUE encoding;
|
||||
VALUE text = rb_funcall(self, rb_intern("text"), 0);
|
||||
VALUE buf = rb_str_buf_new(1024);
|
||||
Check_Type(text, T_STRING);
|
||||
|
||||
int flags = rb_rdiscount__get_flags(self);
|
||||
|
||||
MMIOT *doc = mkd_string(RSTRING_PTR(text), RSTRING_LEN(text), flags);
|
||||
|
||||
if ( mkd_compile(doc, flags) ) {
|
||||
szres = mkd_document(doc, &res);
|
||||
|
||||
if ( szres != EOF ) {
|
||||
rb_str_cat(buf, res, szres);
|
||||
rb_str_cat(buf, "\n", 1);
|
||||
}
|
||||
}
|
||||
mkd_cleanup(doc);
|
||||
|
||||
|
||||
/* force the input encoding */
|
||||
if ( rb_respond_to(text, rb_intern("encoding")) ) {
|
||||
encoding = rb_funcall(text, rb_intern("encoding"), 0);
|
||||
rb_funcall(buf, rb_intern("force_encoding"), 1, encoding);
|
||||
}
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
static VALUE
|
||||
rb_rdiscount_toc_content(int argc, VALUE *argv, VALUE self)
|
||||
{
|
||||
char *res;
|
||||
int szres;
|
||||
|
||||
int flags = rb_rdiscount__get_flags(self);
|
||||
|
||||
/* grab char pointer to markdown input text */
|
||||
VALUE text = rb_funcall(self, rb_intern("text"), 0);
|
||||
Check_Type(text, T_STRING);
|
||||
|
||||
/* allocate a ruby string buffer and wrap it in a stream */
|
||||
VALUE buf = rb_str_buf_new(4096);
|
||||
|
||||
MMIOT *doc = mkd_string(RSTRING_PTR(text), RSTRING_LEN(text), flags);
|
||||
|
||||
if ( mkd_compile(doc, flags) ) {
|
||||
szres = mkd_toc(doc, &res);
|
||||
|
||||
if ( szres != EOF ) {
|
||||
rb_str_cat(buf, res, szres);
|
||||
rb_str_cat(buf, "\n", 1);
|
||||
}
|
||||
}
|
||||
mkd_cleanup(doc);
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
int rb_rdiscount__get_flags(VALUE ruby_obj)
|
||||
{
|
||||
/* compile flags */
|
||||
int flags = MKD_TABSTOP | MKD_NOHEADER;
|
||||
|
||||
/* smart */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("smart"), 0) != Qtrue )
|
||||
flags = flags | MKD_NOPANTS;
|
||||
|
||||
/* filter_html */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("filter_html"), 0) == Qtrue )
|
||||
flags = flags | MKD_NOHTML;
|
||||
|
||||
/* generate_toc */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("generate_toc"), 0) == Qtrue)
|
||||
flags = flags | MKD_TOC;
|
||||
|
||||
/* no_image */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("no_image"), 0) == Qtrue)
|
||||
flags = flags | MKD_NOIMAGE;
|
||||
|
||||
/* no_links */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("no_links"), 0) == Qtrue)
|
||||
flags = flags | MKD_NOLINKS;
|
||||
|
||||
/* no_tables */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("no_tables"), 0) == Qtrue)
|
||||
flags = flags | MKD_NOTABLES;
|
||||
|
||||
/* strict */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("strict"), 0) == Qtrue)
|
||||
flags = flags | MKD_STRICT;
|
||||
|
||||
/* autolink */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("autolink"), 0) == Qtrue)
|
||||
flags = flags | MKD_AUTOLINK;
|
||||
|
||||
/* safelink */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("safelink"), 0) == Qtrue)
|
||||
flags = flags | MKD_SAFELINK;
|
||||
|
||||
/* no_pseudo_protocols */
|
||||
if ( rb_funcall(ruby_obj, rb_intern("no_pseudo_protocols"), 0) == Qtrue)
|
||||
flags = flags | MKD_NO_EXT;
|
||||
|
||||
|
||||
return flags;
|
||||
}
|
||||
|
||||
|
||||
void Init_rdiscount()
|
||||
{
|
||||
rb_cRDiscount = rb_define_class("RDiscount", rb_cObject);
|
||||
rb_define_method(rb_cRDiscount, "to_html", rb_rdiscount_to_html, -1);
|
||||
rb_define_method(rb_cRDiscount, "toc_content", rb_rdiscount_toc_content, -1);
|
||||
}
|
||||
|
||||
/* vim: set ts=4 sw=4: */
|
||||
2538
test/fixtures/c/redis.c
vendored
Normal file
2538
test/fixtures/c/redis.c
vendored
Normal file
File diff suppressed because it is too large
Load Diff
164
test/fixtures/c/yajl.c
vendored
Normal file
164
test/fixtures/c/yajl.c
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
/*
|
||||
* Copyright 2010, Lloyd Hilaiel.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in
|
||||
* the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
*
|
||||
* 3. Neither the name of Lloyd Hilaiel nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
||||
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
||||
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
||||
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include "api/yajl_parse.h"
|
||||
#include "yajl_lex.h"
|
||||
#include "yajl_parser.h"
|
||||
#include "yajl_alloc.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
|
||||
const char *
|
||||
yajl_status_to_string(yajl_status stat)
|
||||
{
|
||||
const char * statStr = "unknown";
|
||||
switch (stat) {
|
||||
case yajl_status_ok:
|
||||
statStr = "ok, no error";
|
||||
break;
|
||||
case yajl_status_client_canceled:
|
||||
statStr = "client canceled parse";
|
||||
break;
|
||||
case yajl_status_insufficient_data:
|
||||
statStr = "eof was met before the parse could complete";
|
||||
break;
|
||||
case yajl_status_error:
|
||||
statStr = "parse error";
|
||||
break;
|
||||
}
|
||||
return statStr;
|
||||
}
|
||||
|
||||
yajl_handle
|
||||
yajl_alloc(const yajl_callbacks * callbacks,
|
||||
const yajl_parser_config * config,
|
||||
const yajl_alloc_funcs * afs,
|
||||
void * ctx)
|
||||
{
|
||||
unsigned int allowComments = 0;
|
||||
unsigned int validateUTF8 = 0;
|
||||
yajl_handle hand = NULL;
|
||||
yajl_alloc_funcs afsBuffer;
|
||||
|
||||
/* first order of business is to set up memory allocation routines */
|
||||
if (afs != NULL) {
|
||||
if (afs->malloc == NULL || afs->realloc == NULL || afs->free == NULL)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
} else {
|
||||
yajl_set_default_alloc_funcs(&afsBuffer);
|
||||
afs = &afsBuffer;
|
||||
}
|
||||
|
||||
hand = (yajl_handle) YA_MALLOC(afs, sizeof(struct yajl_handle_t));
|
||||
|
||||
/* copy in pointers to allocation routines */
|
||||
memcpy((void *) &(hand->alloc), (void *) afs, sizeof(yajl_alloc_funcs));
|
||||
|
||||
if (config != NULL) {
|
||||
allowComments = config->allowComments;
|
||||
validateUTF8 = config->checkUTF8;
|
||||
}
|
||||
|
||||
hand->callbacks = callbacks;
|
||||
hand->ctx = ctx;
|
||||
hand->lexer = yajl_lex_alloc(&(hand->alloc), allowComments, validateUTF8);
|
||||
hand->bytesConsumed = 0;
|
||||
hand->decodeBuf = yajl_buf_alloc(&(hand->alloc));
|
||||
yajl_bs_init(hand->stateStack, &(hand->alloc));
|
||||
|
||||
yajl_bs_push(hand->stateStack, yajl_state_start);
|
||||
|
||||
return hand;
|
||||
}
|
||||
|
||||
void
|
||||
yajl_reset_parser(yajl_handle hand) {
|
||||
hand->lexer = yajl_lex_realloc(hand->lexer);
|
||||
}
|
||||
|
||||
void
|
||||
yajl_free(yajl_handle handle)
|
||||
{
|
||||
yajl_bs_free(handle->stateStack);
|
||||
yajl_buf_free(handle->decodeBuf);
|
||||
yajl_lex_free(handle->lexer);
|
||||
YA_FREE(&(handle->alloc), handle);
|
||||
}
|
||||
|
||||
yajl_status
|
||||
yajl_parse(yajl_handle hand, const unsigned char * jsonText,
|
||||
unsigned int jsonTextLen)
|
||||
{
|
||||
yajl_status status;
|
||||
status = yajl_do_parse(hand, jsonText, jsonTextLen);
|
||||
return status;
|
||||
}
|
||||
|
||||
yajl_status
|
||||
yajl_parse_complete(yajl_handle hand)
|
||||
{
|
||||
/* The particular case we want to handle is a trailing number.
|
||||
* Further input consisting of digits could cause our interpretation
|
||||
* of the number to change (buffered "1" but "2" comes in).
|
||||
* A very simple approach to this is to inject whitespace to terminate
|
||||
* any number in the lex buffer.
|
||||
*/
|
||||
return yajl_parse(hand, (const unsigned char *)" ", 1);
|
||||
}
|
||||
|
||||
unsigned char *
|
||||
yajl_get_error(yajl_handle hand, int verbose,
|
||||
const unsigned char * jsonText, unsigned int jsonTextLen)
|
||||
{
|
||||
return yajl_render_error_string(hand, jsonText, jsonTextLen, verbose);
|
||||
}
|
||||
|
||||
unsigned int
|
||||
yajl_get_bytes_consumed(yajl_handle hand)
|
||||
{
|
||||
if (!hand) return 0;
|
||||
else return hand->bytesConsumed;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
yajl_free_error(yajl_handle hand, unsigned char * str)
|
||||
{
|
||||
/* use memory allocation functions if set */
|
||||
YA_FREE(&(hand->alloc), str);
|
||||
}
|
||||
|
||||
/* XXX: add utility routines to parse from file */
|
||||
55
test/fixtures/coffee/browser.coffee
vendored
Normal file
55
test/fixtures/coffee/browser.coffee
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
# Override exported methods for non-Node.js engines.
|
||||
|
||||
CoffeeScript = require './coffee-script'
|
||||
CoffeeScript.require = require
|
||||
|
||||
# Use standard JavaScript `eval` to eval code.
|
||||
CoffeeScript.eval = (code, options = {}) ->
|
||||
options.bare ?= on
|
||||
eval CoffeeScript.compile code, options
|
||||
|
||||
# Running code does not provide access to this scope.
|
||||
CoffeeScript.run = (code, options = {}) ->
|
||||
options.bare = on
|
||||
Function(CoffeeScript.compile code, options)()
|
||||
|
||||
# If we're not in a browser environment, we're finished with the public API.
|
||||
return unless window?
|
||||
|
||||
# Load a remote script from the current domain via XHR.
|
||||
CoffeeScript.load = (url, callback) ->
|
||||
xhr = new (window.ActiveXObject or XMLHttpRequest)('Microsoft.XMLHTTP')
|
||||
xhr.open 'GET', url, true
|
||||
xhr.overrideMimeType 'text/plain' if 'overrideMimeType' of xhr
|
||||
xhr.onreadystatechange = ->
|
||||
if xhr.readyState is 4
|
||||
if xhr.status in [0, 200]
|
||||
CoffeeScript.run xhr.responseText
|
||||
else
|
||||
throw new Error "Could not load #{url}"
|
||||
callback() if callback
|
||||
xhr.send null
|
||||
|
||||
# Activate CoffeeScript in the browser by having it compile and evaluate
|
||||
# all script tags with a content-type of `text/coffeescript`.
|
||||
# This happens on page load.
|
||||
runScripts = ->
|
||||
scripts = document.getElementsByTagName 'script'
|
||||
coffees = (s for s in scripts when s.type is 'text/coffeescript')
|
||||
index = 0
|
||||
length = coffees.length
|
||||
do execute = ->
|
||||
script = coffees[index++]
|
||||
if script?.type is 'text/coffeescript'
|
||||
if script.src
|
||||
CoffeeScript.load script.src, execute
|
||||
else
|
||||
CoffeeScript.run script.innerHTML
|
||||
execute()
|
||||
null
|
||||
|
||||
# Listen for window load, both in browsers and in IE.
|
||||
if window.addEventListener
|
||||
addEventListener 'DOMContentLoaded', runScripts, no
|
||||
else
|
||||
attachEvent 'onload', runScripts
|
||||
130
test/fixtures/coffee/coffee-script.coffee
vendored
Normal file
130
test/fixtures/coffee/coffee-script.coffee
vendored
Normal file
@@ -0,0 +1,130 @@
|
||||
# CoffeeScript can be used both on the server, as a command-line compiler based
|
||||
# on Node.js/V8, or to run CoffeeScripts directly in the browser. This module
|
||||
# contains the main entry functions for tokenizing, parsing, and compiling
|
||||
# source CoffeeScript into JavaScript.
|
||||
#
|
||||
# If included on a webpage, it will automatically sniff out, compile, and
|
||||
# execute all scripts present in `text/coffeescript` tags.
|
||||
|
||||
fs = require 'fs'
|
||||
path = require 'path'
|
||||
{Lexer,RESERVED} = require './lexer'
|
||||
{parser} = require './parser'
|
||||
vm = require 'vm'
|
||||
|
||||
# TODO: Remove registerExtension when fully deprecated.
|
||||
if require.extensions
|
||||
require.extensions['.coffee'] = (module, filename) ->
|
||||
content = compile fs.readFileSync(filename, 'utf8'), {filename}
|
||||
module._compile content, filename
|
||||
else if require.registerExtension
|
||||
require.registerExtension '.coffee', (content) -> compile content
|
||||
|
||||
# The current CoffeeScript version number.
|
||||
exports.VERSION = '1.3.3'
|
||||
|
||||
# Words that cannot be used as identifiers in CoffeeScript code
|
||||
exports.RESERVED = RESERVED
|
||||
|
||||
# Expose helpers for testing.
|
||||
exports.helpers = require './helpers'
|
||||
|
||||
# Compile a string of CoffeeScript code to JavaScript, using the Coffee/Jison
|
||||
# compiler.
|
||||
exports.compile = compile = (code, options = {}) ->
|
||||
{merge} = exports.helpers
|
||||
try
|
||||
js = (parser.parse lexer.tokenize code).compile options
|
||||
return js unless options.header
|
||||
catch err
|
||||
err.message = "In #{options.filename}, #{err.message}" if options.filename
|
||||
throw err
|
||||
header = "Generated by CoffeeScript #{@VERSION}"
|
||||
"// #{header}\n#{js}"
|
||||
|
||||
# Tokenize a string of CoffeeScript code, and return the array of tokens.
|
||||
exports.tokens = (code, options) ->
|
||||
lexer.tokenize code, options
|
||||
|
||||
# Parse a string of CoffeeScript code or an array of lexed tokens, and
|
||||
# return the AST. You can then compile it by calling `.compile()` on the root,
|
||||
# or traverse it by using `.traverseChildren()` with a callback.
|
||||
exports.nodes = (source, options) ->
|
||||
if typeof source is 'string'
|
||||
parser.parse lexer.tokenize source, options
|
||||
else
|
||||
parser.parse source
|
||||
|
||||
# Compile and execute a string of CoffeeScript (on the server), correctly
|
||||
# setting `__filename`, `__dirname`, and relative `require()`.
|
||||
exports.run = (code, options = {}) ->
|
||||
mainModule = require.main
|
||||
|
||||
# Set the filename.
|
||||
mainModule.filename = process.argv[1] =
|
||||
if options.filename then fs.realpathSync(options.filename) else '.'
|
||||
|
||||
# Clear the module cache.
|
||||
mainModule.moduleCache and= {}
|
||||
|
||||
# Assign paths for node_modules loading
|
||||
mainModule.paths = require('module')._nodeModulePaths path.dirname fs.realpathSync options.filename
|
||||
|
||||
# Compile.
|
||||
if path.extname(mainModule.filename) isnt '.coffee' or require.extensions
|
||||
mainModule._compile compile(code, options), mainModule.filename
|
||||
else
|
||||
mainModule._compile code, mainModule.filename
|
||||
|
||||
# Compile and evaluate a string of CoffeeScript (in a Node.js-like environment).
|
||||
# The CoffeeScript REPL uses this to run the input.
|
||||
exports.eval = (code, options = {}) ->
|
||||
return unless code = code.trim()
|
||||
Script = vm.Script
|
||||
if Script
|
||||
if options.sandbox?
|
||||
if options.sandbox instanceof Script.createContext().constructor
|
||||
sandbox = options.sandbox
|
||||
else
|
||||
sandbox = Script.createContext()
|
||||
sandbox[k] = v for own k, v of options.sandbox
|
||||
sandbox.global = sandbox.root = sandbox.GLOBAL = sandbox
|
||||
else
|
||||
sandbox = global
|
||||
sandbox.__filename = options.filename || 'eval'
|
||||
sandbox.__dirname = path.dirname sandbox.__filename
|
||||
# define module/require only if they chose not to specify their own
|
||||
unless sandbox isnt global or sandbox.module or sandbox.require
|
||||
Module = require 'module'
|
||||
sandbox.module = _module = new Module(options.modulename || 'eval')
|
||||
sandbox.require = _require = (path) -> Module._load path, _module, true
|
||||
_module.filename = sandbox.__filename
|
||||
_require[r] = require[r] for r in Object.getOwnPropertyNames require when r isnt 'paths'
|
||||
# use the same hack node currently uses for their own REPL
|
||||
_require.paths = _module.paths = Module._nodeModulePaths process.cwd()
|
||||
_require.resolve = (request) -> Module._resolveFilename request, _module
|
||||
o = {}
|
||||
o[k] = v for own k, v of options
|
||||
o.bare = on # ensure return value
|
||||
js = compile code, o
|
||||
if sandbox is global
|
||||
vm.runInThisContext js
|
||||
else
|
||||
vm.runInContext js, sandbox
|
||||
|
||||
# Instantiate a Lexer for our use here.
|
||||
lexer = new Lexer
|
||||
|
||||
# The real Lexer produces a generic stream of tokens. This object provides a
|
||||
# thin wrapper around it, compatible with the Jison API. We can then pass it
|
||||
# directly as a "Jison lexer".
|
||||
parser.lexer =
|
||||
lex: ->
|
||||
[tag, @yytext, @yylineno] = @tokens[@pos++] or ['']
|
||||
tag
|
||||
setInput: (@tokens) ->
|
||||
@pos = 0
|
||||
upcomingInput: ->
|
||||
""
|
||||
|
||||
parser.yy = require './nodes'
|
||||
709
test/fixtures/coffee/lexer.coffee
vendored
Normal file
709
test/fixtures/coffee/lexer.coffee
vendored
Normal file
@@ -0,0 +1,709 @@
|
||||
# The CoffeeScript Lexer. Uses a series of token-matching regexes to attempt
|
||||
# matches against the beginning of the source code. When a match is found,
|
||||
# a token is produced, we consume the match, and start again. Tokens are in the
|
||||
# form:
|
||||
#
|
||||
# [tag, value, lineNumber]
|
||||
#
|
||||
# Which is a format that can be fed directly into [Jison](http://github.com/zaach/jison).
|
||||
|
||||
{Rewriter, INVERSES} = require './rewriter'
|
||||
|
||||
# Import the helpers we need.
|
||||
{count, starts, compact, last} = require './helpers'
|
||||
|
||||
# The Lexer Class
|
||||
# ---------------
|
||||
|
||||
# The Lexer class reads a stream of CoffeeScript and divvies it up into tagged
|
||||
# tokens. Some potential ambiguity in the grammar has been avoided by
|
||||
# pushing some extra smarts into the Lexer.
|
||||
exports.Lexer = class Lexer
|
||||
|
||||
# **tokenize** is the Lexer's main method. Scan by attempting to match tokens
|
||||
# one at a time, using a regular expression anchored at the start of the
|
||||
# remaining code, or a custom recursive token-matching method
|
||||
# (for interpolations). When the next token has been recorded, we move forward
|
||||
# within the code past the token, and begin again.
|
||||
#
|
||||
# Each tokenizing method is responsible for returning the number of characters
|
||||
# it has consumed.
|
||||
#
|
||||
# Before returning the token stream, run it through the [Rewriter](rewriter.html)
|
||||
# unless explicitly asked not to.
|
||||
tokenize: (code, opts = {}) ->
|
||||
code = "\n#{code}" if WHITESPACE.test code
|
||||
code = code.replace(/\r/g, '').replace TRAILING_SPACES, ''
|
||||
|
||||
@code = code # The remainder of the source code.
|
||||
@line = opts.line or 0 # The current line.
|
||||
@indent = 0 # The current indentation level.
|
||||
@indebt = 0 # The over-indentation at the current level.
|
||||
@outdebt = 0 # The under-outdentation at the current level.
|
||||
@indents = [] # The stack of all current indentation levels.
|
||||
@ends = [] # The stack for pairing up tokens.
|
||||
@tokens = [] # Stream of parsed tokens in the form `['TYPE', value, line]`.
|
||||
|
||||
# At every position, run through this list of attempted matches,
|
||||
# short-circuiting if any of them succeed. Their order determines precedence:
|
||||
# `@literalToken` is the fallback catch-all.
|
||||
i = 0
|
||||
while @chunk = code[i..]
|
||||
i += @identifierToken() or
|
||||
@commentToken() or
|
||||
@whitespaceToken() or
|
||||
@lineToken() or
|
||||
@heredocToken() or
|
||||
@stringToken() or
|
||||
@numberToken() or
|
||||
@regexToken() or
|
||||
@jsToken() or
|
||||
@literalToken()
|
||||
|
||||
@closeIndentation()
|
||||
@error "missing #{tag}" if tag = @ends.pop()
|
||||
return @tokens if opts.rewrite is off
|
||||
(new Rewriter).rewrite @tokens
|
||||
|
||||
# Tokenizers
|
||||
# ----------
|
||||
|
||||
# Matches identifying literals: variables, keywords, method names, etc.
|
||||
# Check to ensure that JavaScript reserved words aren't being used as
|
||||
# identifiers. Because CoffeeScript reserves a handful of keywords that are
|
||||
# allowed in JavaScript, we're careful not to tag them as keywords when
|
||||
# referenced as property names here, so you can still do `jQuery.is()` even
|
||||
# though `is` means `===` otherwise.
|
||||
identifierToken: ->
|
||||
return 0 unless match = IDENTIFIER.exec @chunk
|
||||
[input, id, colon] = match
|
||||
|
||||
if id is 'own' and @tag() is 'FOR'
|
||||
@token 'OWN', id
|
||||
return id.length
|
||||
forcedIdentifier = colon or
|
||||
(prev = last @tokens) and (prev[0] in ['.', '?.', '::'] or
|
||||
not prev.spaced and prev[0] is '@')
|
||||
tag = 'IDENTIFIER'
|
||||
|
||||
if not forcedIdentifier and (id in JS_KEYWORDS or id in COFFEE_KEYWORDS)
|
||||
tag = id.toUpperCase()
|
||||
if tag is 'WHEN' and @tag() in LINE_BREAK
|
||||
tag = 'LEADING_WHEN'
|
||||
else if tag is 'FOR'
|
||||
@seenFor = yes
|
||||
else if tag is 'UNLESS'
|
||||
tag = 'IF'
|
||||
else if tag in UNARY
|
||||
tag = 'UNARY'
|
||||
else if tag in RELATION
|
||||
if tag isnt 'INSTANCEOF' and @seenFor
|
||||
tag = 'FOR' + tag
|
||||
@seenFor = no
|
||||
else
|
||||
tag = 'RELATION'
|
||||
if @value() is '!'
|
||||
@tokens.pop()
|
||||
id = '!' + id
|
||||
|
||||
if id in JS_FORBIDDEN
|
||||
if forcedIdentifier
|
||||
tag = 'IDENTIFIER'
|
||||
id = new String id
|
||||
id.reserved = yes
|
||||
else if id in RESERVED
|
||||
@error "reserved word \"#{id}\""
|
||||
|
||||
unless forcedIdentifier
|
||||
id = COFFEE_ALIAS_MAP[id] if id in COFFEE_ALIASES
|
||||
tag = switch id
|
||||
when '!' then 'UNARY'
|
||||
when '==', '!=' then 'COMPARE'
|
||||
when '&&', '||' then 'LOGIC'
|
||||
when 'true', 'false' then 'BOOL'
|
||||
when 'break', 'continue' then 'STATEMENT'
|
||||
else tag
|
||||
|
||||
@token tag, id
|
||||
@token ':', ':' if colon
|
||||
input.length
|
||||
|
||||
# Matches numbers, including decimals, hex, and exponential notation.
|
||||
# Be careful not to interfere with ranges-in-progress.
|
||||
numberToken: ->
|
||||
return 0 unless match = NUMBER.exec @chunk
|
||||
number = match[0]
|
||||
if /^0[BOX]/.test number
|
||||
@error "radix prefix '#{number}' must be lowercase"
|
||||
else if /E/.test(number) and not /^0x/.test number
|
||||
@error "exponential notation '#{number}' must be indicated with a lowercase 'e'"
|
||||
else if /^0\d*[89]/.test number
|
||||
@error "decimal literal '#{number}' must not be prefixed with '0'"
|
||||
else if /^0\d+/.test number
|
||||
@error "octal literal '#{number}' must be prefixed with '0o'"
|
||||
lexedLength = number.length
|
||||
if octalLiteral = /^0o([0-7]+)/.exec number
|
||||
number = '0x' + (parseInt octalLiteral[1], 8).toString 16
|
||||
if binaryLiteral = /^0b([01]+)/.exec number
|
||||
number = '0x' + (parseInt binaryLiteral[1], 2).toString 16
|
||||
@token 'NUMBER', number
|
||||
lexedLength
|
||||
|
||||
# Matches strings, including multi-line strings. Ensures that quotation marks
|
||||
# are balanced within the string's contents, and within nested interpolations.
|
||||
stringToken: ->
|
||||
switch @chunk.charAt 0
|
||||
when "'"
|
||||
return 0 unless match = SIMPLESTR.exec @chunk
|
||||
@token 'STRING', (string = match[0]).replace MULTILINER, '\\\n'
|
||||
when '"'
|
||||
return 0 unless string = @balancedString @chunk, '"'
|
||||
if 0 < string.indexOf '#{', 1
|
||||
@interpolateString string[1...-1]
|
||||
else
|
||||
@token 'STRING', @escapeLines string
|
||||
else
|
||||
return 0
|
||||
if octalEsc = /^(?:\\.|[^\\])*\\(?:0[0-7]|[1-7])/.test string
|
||||
@error "octal escape sequences #{string} are not allowed"
|
||||
@line += count string, '\n'
|
||||
string.length
|
||||
|
||||
# Matches heredocs, adjusting indentation to the correct level, as heredocs
|
||||
# preserve whitespace, but ignore indentation to the left.
|
||||
heredocToken: ->
|
||||
return 0 unless match = HEREDOC.exec @chunk
|
||||
heredoc = match[0]
|
||||
quote = heredoc.charAt 0
|
||||
doc = @sanitizeHeredoc match[2], quote: quote, indent: null
|
||||
if quote is '"' and 0 <= doc.indexOf '#{'
|
||||
@interpolateString doc, heredoc: yes
|
||||
else
|
||||
@token 'STRING', @makeString doc, quote, yes
|
||||
@line += count heredoc, '\n'
|
||||
heredoc.length
|
||||
|
||||
# Matches and consumes comments.
|
||||
commentToken: ->
|
||||
return 0 unless match = @chunk.match COMMENT
|
||||
[comment, here] = match
|
||||
if here
|
||||
@token 'HERECOMMENT', @sanitizeHeredoc here,
|
||||
herecomment: true, indent: Array(@indent + 1).join(' ')
|
||||
@line += count comment, '\n'
|
||||
comment.length
|
||||
|
||||
# Matches JavaScript interpolated directly into the source via backticks.
|
||||
jsToken: ->
|
||||
return 0 unless @chunk.charAt(0) is '`' and match = JSTOKEN.exec @chunk
|
||||
@token 'JS', (script = match[0])[1...-1]
|
||||
script.length
|
||||
|
||||
# Matches regular expression literals. Lexing regular expressions is difficult
|
||||
# to distinguish from division, so we borrow some basic heuristics from
|
||||
# JavaScript and Ruby.
|
||||
regexToken: ->
|
||||
return 0 if @chunk.charAt(0) isnt '/'
|
||||
if match = HEREGEX.exec @chunk
|
||||
length = @heregexToken match
|
||||
@line += count match[0], '\n'
|
||||
return length
|
||||
|
||||
prev = last @tokens
|
||||
return 0 if prev and (prev[0] in (if prev.spaced then NOT_REGEX else NOT_SPACED_REGEX))
|
||||
return 0 unless match = REGEX.exec @chunk
|
||||
[match, regex, flags] = match
|
||||
if regex[..1] is '/*' then @error 'regular expressions cannot begin with `*`'
|
||||
if regex is '//' then regex = '/(?:)/'
|
||||
@token 'REGEX', "#{regex}#{flags}"
|
||||
match.length
|
||||
|
||||
# Matches multiline extended regular expressions.
|
||||
heregexToken: (match) ->
|
||||
[heregex, body, flags] = match
|
||||
if 0 > body.indexOf '#{'
|
||||
re = body.replace(HEREGEX_OMIT, '').replace(/\//g, '\\/')
|
||||
if re.match /^\*/ then @error 'regular expressions cannot begin with `*`'
|
||||
@token 'REGEX', "/#{ re or '(?:)' }/#{flags}"
|
||||
return heregex.length
|
||||
@token 'IDENTIFIER', 'RegExp'
|
||||
@tokens.push ['CALL_START', '(']
|
||||
tokens = []
|
||||
for [tag, value] in @interpolateString(body, regex: yes)
|
||||
if tag is 'TOKENS'
|
||||
tokens.push value...
|
||||
else
|
||||
continue unless value = value.replace HEREGEX_OMIT, ''
|
||||
value = value.replace /\\/g, '\\\\'
|
||||
tokens.push ['STRING', @makeString(value, '"', yes)]
|
||||
tokens.push ['+', '+']
|
||||
tokens.pop()
|
||||
@tokens.push ['STRING', '""'], ['+', '+'] unless tokens[0]?[0] is 'STRING'
|
||||
@tokens.push tokens...
|
||||
@tokens.push [',', ','], ['STRING', '"' + flags + '"'] if flags
|
||||
@token ')', ')'
|
||||
heregex.length
|
||||
|
||||
# Matches newlines, indents, and outdents, and determines which is which.
|
||||
# If we can detect that the current line is continued onto the the next line,
|
||||
# then the newline is suppressed:
|
||||
#
|
||||
# elements
|
||||
# .each( ... )
|
||||
# .map( ... )
|
||||
#
|
||||
# Keeps track of the level of indentation, because a single outdent token
|
||||
# can close multiple indents, so we need to know how far in we happen to be.
|
||||
lineToken: ->
|
||||
return 0 unless match = MULTI_DENT.exec @chunk
|
||||
indent = match[0]
|
||||
@line += count indent, '\n'
|
||||
@seenFor = no
|
||||
size = indent.length - 1 - indent.lastIndexOf '\n'
|
||||
noNewlines = @unfinished()
|
||||
if size - @indebt is @indent
|
||||
if noNewlines then @suppressNewlines() else @newlineToken()
|
||||
return indent.length
|
||||
if size > @indent
|
||||
if noNewlines
|
||||
@indebt = size - @indent
|
||||
@suppressNewlines()
|
||||
return indent.length
|
||||
diff = size - @indent + @outdebt
|
||||
@token 'INDENT', diff
|
||||
@indents.push diff
|
||||
@ends.push 'OUTDENT'
|
||||
@outdebt = @indebt = 0
|
||||
else
|
||||
@indebt = 0
|
||||
@outdentToken @indent - size, noNewlines
|
||||
@indent = size
|
||||
indent.length
|
||||
|
||||
# Record an outdent token or multiple tokens, if we happen to be moving back
|
||||
# inwards past several recorded indents.
|
||||
outdentToken: (moveOut, noNewlines) ->
|
||||
while moveOut > 0
|
||||
len = @indents.length - 1
|
||||
if @indents[len] is undefined
|
||||
moveOut = 0
|
||||
else if @indents[len] is @outdebt
|
||||
moveOut -= @outdebt
|
||||
@outdebt = 0
|
||||
else if @indents[len] < @outdebt
|
||||
@outdebt -= @indents[len]
|
||||
moveOut -= @indents[len]
|
||||
else
|
||||
dent = @indents.pop() - @outdebt
|
||||
moveOut -= dent
|
||||
@outdebt = 0
|
||||
@pair 'OUTDENT'
|
||||
@token 'OUTDENT', dent
|
||||
@outdebt -= moveOut if dent
|
||||
@tokens.pop() while @value() is ';'
|
||||
@token 'TERMINATOR', '\n' unless @tag() is 'TERMINATOR' or noNewlines
|
||||
this
|
||||
|
||||
# Matches and consumes non-meaningful whitespace. Tag the previous token
|
||||
# as being "spaced", because there are some cases where it makes a difference.
|
||||
whitespaceToken: ->
|
||||
return 0 unless (match = WHITESPACE.exec @chunk) or
|
||||
(nline = @chunk.charAt(0) is '\n')
|
||||
prev = last @tokens
|
||||
prev[if match then 'spaced' else 'newLine'] = true if prev
|
||||
if match then match[0].length else 0
|
||||
|
||||
# Generate a newline token. Consecutive newlines get merged together.
|
||||
newlineToken: ->
|
||||
@tokens.pop() while @value() is ';'
|
||||
@token 'TERMINATOR', '\n' unless @tag() is 'TERMINATOR'
|
||||
this
|
||||
|
||||
# Use a `\` at a line-ending to suppress the newline.
|
||||
# The slash is removed here once its job is done.
|
||||
suppressNewlines: ->
|
||||
@tokens.pop() if @value() is '\\'
|
||||
this
|
||||
|
||||
# We treat all other single characters as a token. E.g.: `( ) , . !`
|
||||
# Multi-character operators are also literal tokens, so that Jison can assign
|
||||
# the proper order of operations. There are some symbols that we tag specially
|
||||
# here. `;` and newlines are both treated as a `TERMINATOR`, we distinguish
|
||||
# parentheses that indicate a method call from regular parentheses, and so on.
|
||||
literalToken: ->
|
||||
if match = OPERATOR.exec @chunk
|
||||
[value] = match
|
||||
@tagParameters() if CODE.test value
|
||||
else
|
||||
value = @chunk.charAt 0
|
||||
tag = value
|
||||
prev = last @tokens
|
||||
if value is '=' and prev
|
||||
if not prev[1].reserved and prev[1] in JS_FORBIDDEN
|
||||
@error "reserved word \"#{@value()}\" can't be assigned"
|
||||
if prev[1] in ['||', '&&']
|
||||
prev[0] = 'COMPOUND_ASSIGN'
|
||||
prev[1] += '='
|
||||
return value.length
|
||||
if value is ';'
|
||||
@seenFor = no
|
||||
tag = 'TERMINATOR'
|
||||
else if value in MATH then tag = 'MATH'
|
||||
else if value in COMPARE then tag = 'COMPARE'
|
||||
else if value in COMPOUND_ASSIGN then tag = 'COMPOUND_ASSIGN'
|
||||
else if value in UNARY then tag = 'UNARY'
|
||||
else if value in SHIFT then tag = 'SHIFT'
|
||||
else if value in LOGIC or value is '?' and prev?.spaced then tag = 'LOGIC'
|
||||
else if prev and not prev.spaced
|
||||
if value is '(' and prev[0] in CALLABLE
|
||||
prev[0] = 'FUNC_EXIST' if prev[0] is '?'
|
||||
tag = 'CALL_START'
|
||||
else if value is '[' and prev[0] in INDEXABLE
|
||||
tag = 'INDEX_START'
|
||||
switch prev[0]
|
||||
when '?' then prev[0] = 'INDEX_SOAK'
|
||||
switch value
|
||||
when '(', '{', '[' then @ends.push INVERSES[value]
|
||||
when ')', '}', ']' then @pair value
|
||||
@token tag, value
|
||||
value.length
|
||||
|
||||
# Token Manipulators
|
||||
# ------------------
|
||||
|
||||
# Sanitize a heredoc or herecomment by
|
||||
# erasing all external indentation on the left-hand side.
|
||||
sanitizeHeredoc: (doc, options) ->
|
||||
{indent, herecomment} = options
|
||||
if herecomment
|
||||
if HEREDOC_ILLEGAL.test doc
|
||||
@error "block comment cannot contain \"*/\", starting"
|
||||
return doc if doc.indexOf('\n') <= 0
|
||||
else
|
||||
while match = HEREDOC_INDENT.exec doc
|
||||
attempt = match[1]
|
||||
indent = attempt if indent is null or 0 < attempt.length < indent.length
|
||||
doc = doc.replace /// \n #{indent} ///g, '\n' if indent
|
||||
doc = doc.replace /^\n/, '' unless herecomment
|
||||
doc
|
||||
|
||||
# A source of ambiguity in our grammar used to be parameter lists in function
|
||||
# definitions versus argument lists in function calls. Walk backwards, tagging
|
||||
# parameters specially in order to make things easier for the parser.
|
||||
tagParameters: ->
|
||||
return this if @tag() isnt ')'
|
||||
stack = []
|
||||
{tokens} = this
|
||||
i = tokens.length
|
||||
tokens[--i][0] = 'PARAM_END'
|
||||
while tok = tokens[--i]
|
||||
switch tok[0]
|
||||
when ')'
|
||||
stack.push tok
|
||||
when '(', 'CALL_START'
|
||||
if stack.length then stack.pop()
|
||||
else if tok[0] is '('
|
||||
tok[0] = 'PARAM_START'
|
||||
return this
|
||||
else return this
|
||||
this
|
||||
|
||||
# Close up all remaining open blocks at the end of the file.
|
||||
closeIndentation: ->
|
||||
@outdentToken @indent
|
||||
|
||||
# Matches a balanced group such as a single or double-quoted string. Pass in
|
||||
# a series of delimiters, all of which must be nested correctly within the
|
||||
# contents of the string. This method allows us to have strings within
|
||||
# interpolations within strings, ad infinitum.
|
||||
balancedString: (str, end) ->
|
||||
continueCount = 0
|
||||
stack = [end]
|
||||
for i in [1...str.length]
|
||||
if continueCount
|
||||
--continueCount
|
||||
continue
|
||||
switch letter = str.charAt i
|
||||
when '\\'
|
||||
++continueCount
|
||||
continue
|
||||
when end
|
||||
stack.pop()
|
||||
unless stack.length
|
||||
return str[0..i]
|
||||
end = stack[stack.length - 1]
|
||||
continue
|
||||
if end is '}' and letter in ['"', "'"]
|
||||
stack.push end = letter
|
||||
else if end is '}' and letter is '/' and match = (HEREGEX.exec(str[i..]) or REGEX.exec(str[i..]))
|
||||
continueCount += match[0].length - 1
|
||||
else if end is '}' and letter is '{'
|
||||
stack.push end = '}'
|
||||
else if end is '"' and prev is '#' and letter is '{'
|
||||
stack.push end = '}'
|
||||
prev = letter
|
||||
@error "missing #{ stack.pop() }, starting"
|
||||
|
||||
# Expand variables and expressions inside double-quoted strings using
|
||||
# Ruby-like notation for substitution of arbitrary expressions.
|
||||
#
|
||||
# "Hello #{name.capitalize()}."
|
||||
#
|
||||
# If it encounters an interpolation, this method will recursively create a
|
||||
# new Lexer, tokenize the interpolated contents, and merge them into the
|
||||
# token stream.
|
||||
interpolateString: (str, options = {}) ->
|
||||
{heredoc, regex} = options
|
||||
tokens = []
|
||||
pi = 0
|
||||
i = -1
|
||||
while letter = str.charAt i += 1
|
||||
if letter is '\\'
|
||||
i += 1
|
||||
continue
|
||||
unless letter is '#' and str.charAt(i+1) is '{' and
|
||||
(expr = @balancedString str[i + 1..], '}')
|
||||
continue
|
||||
tokens.push ['NEOSTRING', str[pi...i]] if pi < i
|
||||
inner = expr[1...-1]
|
||||
if inner.length
|
||||
nested = new Lexer().tokenize inner, line: @line, rewrite: off
|
||||
nested.pop()
|
||||
nested.shift() if nested[0]?[0] is 'TERMINATOR'
|
||||
if len = nested.length
|
||||
if len > 1
|
||||
nested.unshift ['(', '(', @line]
|
||||
nested.push [')', ')', @line]
|
||||
tokens.push ['TOKENS', nested]
|
||||
i += expr.length
|
||||
pi = i + 1
|
||||
tokens.push ['NEOSTRING', str[pi..]] if i > pi < str.length
|
||||
return tokens if regex
|
||||
return @token 'STRING', '""' unless tokens.length
|
||||
tokens.unshift ['', ''] unless tokens[0][0] is 'NEOSTRING'
|
||||
@token '(', '(' if interpolated = tokens.length > 1
|
||||
for [tag, value], i in tokens
|
||||
@token '+', '+' if i
|
||||
if tag is 'TOKENS'
|
||||
@tokens.push value...
|
||||
else
|
||||
@token 'STRING', @makeString value, '"', heredoc
|
||||
@token ')', ')' if interpolated
|
||||
tokens
|
||||
|
||||
# Pairs up a closing token, ensuring that all listed pairs of tokens are
|
||||
# correctly balanced throughout the course of the token stream.
|
||||
pair: (tag) ->
|
||||
unless tag is wanted = last @ends
|
||||
@error "unmatched #{tag}" unless 'OUTDENT' is wanted
|
||||
# Auto-close INDENT to support syntax like this:
|
||||
#
|
||||
# el.click((event) ->
|
||||
# el.hide())
|
||||
#
|
||||
@indent -= size = last @indents
|
||||
@outdentToken size, true
|
||||
return @pair tag
|
||||
@ends.pop()
|
||||
|
||||
# Helpers
|
||||
# -------
|
||||
|
||||
# Add a token to the results, taking note of the line number.
|
||||
token: (tag, value) ->
|
||||
@tokens.push [tag, value, @line]
|
||||
|
||||
# Peek at a tag in the current token stream.
|
||||
tag: (index, tag) ->
|
||||
(tok = last @tokens, index) and if tag then tok[0] = tag else tok[0]
|
||||
|
||||
# Peek at a value in the current token stream.
|
||||
value: (index, val) ->
|
||||
(tok = last @tokens, index) and if val then tok[1] = val else tok[1]
|
||||
|
||||
# Are we in the midst of an unfinished expression?
|
||||
unfinished: ->
|
||||
LINE_CONTINUER.test(@chunk) or
|
||||
@tag() in ['\\', '.', '?.', 'UNARY', 'MATH', '+', '-', 'SHIFT', 'RELATION'
|
||||
'COMPARE', 'LOGIC', 'THROW', 'EXTENDS']
|
||||
|
||||
# Converts newlines for string literals.
|
||||
escapeLines: (str, heredoc) ->
|
||||
str.replace MULTILINER, if heredoc then '\\n' else ''
|
||||
|
||||
# Constructs a string token by escaping quotes and newlines.
|
||||
makeString: (body, quote, heredoc) ->
|
||||
return quote + quote unless body
|
||||
body = body.replace /\\([\s\S])/g, (match, contents) ->
|
||||
if contents in ['\n', quote] then contents else match
|
||||
body = body.replace /// #{quote} ///g, '\\$&'
|
||||
quote + @escapeLines(body, heredoc) + quote
|
||||
|
||||
# Throws a syntax error on the current `@line`.
|
||||
error: (message) ->
|
||||
throw SyntaxError "#{message} on line #{ @line + 1}"
|
||||
|
||||
# Constants
|
||||
# ---------
|
||||
|
||||
# Keywords that CoffeeScript shares in common with JavaScript.
|
||||
JS_KEYWORDS = [
|
||||
'true', 'false', 'null', 'this'
|
||||
'new', 'delete', 'typeof', 'in', 'instanceof'
|
||||
'return', 'throw', 'break', 'continue', 'debugger'
|
||||
'if', 'else', 'switch', 'for', 'while', 'do', 'try', 'catch', 'finally'
|
||||
'class', 'extends', 'super'
|
||||
]
|
||||
|
||||
# CoffeeScript-only keywords.
|
||||
COFFEE_KEYWORDS = ['undefined', 'then', 'unless', 'until', 'loop', 'of', 'by', 'when']
|
||||
|
||||
COFFEE_ALIAS_MAP =
|
||||
and : '&&'
|
||||
or : '||'
|
||||
is : '=='
|
||||
isnt : '!='
|
||||
not : '!'
|
||||
yes : 'true'
|
||||
no : 'false'
|
||||
on : 'true'
|
||||
off : 'false'
|
||||
|
||||
COFFEE_ALIASES = (key for key of COFFEE_ALIAS_MAP)
|
||||
COFFEE_KEYWORDS = COFFEE_KEYWORDS.concat COFFEE_ALIASES
|
||||
|
||||
# The list of keywords that are reserved by JavaScript, but not used, or are
|
||||
# used by CoffeeScript internally. We throw an error when these are encountered,
|
||||
# to avoid having a JavaScript error at runtime.
|
||||
RESERVED = [
|
||||
'case', 'default', 'function', 'var', 'void', 'with'
|
||||
'const', 'let', 'enum', 'export', 'import', 'native'
|
||||
'__hasProp', '__extends', '__slice', '__bind', '__indexOf'
|
||||
'implements', 'interface', 'let', 'package',
|
||||
'private', 'protected', 'public', 'static', 'yield'
|
||||
]
|
||||
|
||||
STRICT_PROSCRIBED = ['arguments', 'eval']
|
||||
|
||||
# The superset of both JavaScript keywords and reserved words, none of which may
|
||||
# be used as identifiers or properties.
|
||||
JS_FORBIDDEN = JS_KEYWORDS.concat(RESERVED).concat(STRICT_PROSCRIBED)
|
||||
|
||||
exports.RESERVED = RESERVED.concat(JS_KEYWORDS).concat(COFFEE_KEYWORDS).concat(STRICT_PROSCRIBED)
|
||||
exports.STRICT_PROSCRIBED = STRICT_PROSCRIBED
|
||||
|
||||
# Token matching regexes.
|
||||
IDENTIFIER = /// ^
|
||||
( [$A-Za-z_\x7f-\uffff][$\w\x7f-\uffff]* )
|
||||
( [^\n\S]* : (?!:) )? # Is this a property name?
|
||||
///
|
||||
|
||||
NUMBER = ///
|
||||
^ 0b[01]+ | # binary
|
||||
^ 0o[0-7]+ | # octal
|
||||
^ 0x[\da-f]+ | # hex
|
||||
^ \d*\.?\d+ (?:e[+-]?\d+)? # decimal
|
||||
///i
|
||||
|
||||
HEREDOC = /// ^ ("""|''') ([\s\S]*?) (?:\n[^\n\S]*)? \1 ///
|
||||
|
||||
OPERATOR = /// ^ (
|
||||
?: [-=]> # function
|
||||
| [-+*/%<>&|^!?=]= # compound assign / compare
|
||||
| >>>=? # zero-fill right shift
|
||||
| ([-+:])\1 # doubles
|
||||
| ([&|<>])\2=? # logic / shift
|
||||
| \?\. # soak access
|
||||
| \.{2,3} # range or splat
|
||||
) ///
|
||||
|
||||
WHITESPACE = /^[^\n\S]+/
|
||||
|
||||
COMMENT = /^###([^#][\s\S]*?)(?:###[^\n\S]*|(?:###)?$)|^(?:\s*#(?!##[^#]).*)+/
|
||||
|
||||
CODE = /^[-=]>/
|
||||
|
||||
MULTI_DENT = /^(?:\n[^\n\S]*)+/
|
||||
|
||||
SIMPLESTR = /^'[^\\']*(?:\\.[^\\']*)*'/
|
||||
|
||||
JSTOKEN = /^`[^\\`]*(?:\\.[^\\`]*)*`/
|
||||
|
||||
# Regex-matching-regexes.
|
||||
REGEX = /// ^
|
||||
(/ (?! [\s=] ) # disallow leading whitespace or equals signs
|
||||
[^ [ / \n \\ ]* # every other thing
|
||||
(?:
|
||||
(?: \\[\s\S] # anything escaped
|
||||
| \[ # character class
|
||||
[^ \] \n \\ ]*
|
||||
(?: \\[\s\S] [^ \] \n \\ ]* )*
|
||||
]
|
||||
) [^ [ / \n \\ ]*
|
||||
)*
|
||||
/) ([imgy]{0,4}) (?!\w)
|
||||
///
|
||||
|
||||
HEREGEX = /// ^ /{3} ([\s\S]+?) /{3} ([imgy]{0,4}) (?!\w) ///
|
||||
|
||||
HEREGEX_OMIT = /\s+(?:#.*)?/g
|
||||
|
||||
# Token cleaning regexes.
|
||||
MULTILINER = /\n/g
|
||||
|
||||
HEREDOC_INDENT = /\n+([^\n\S]*)/g
|
||||
|
||||
HEREDOC_ILLEGAL = /\*\//
|
||||
|
||||
LINE_CONTINUER = /// ^ \s* (?: , | \??\.(?![.\d]) | :: ) ///
|
||||
|
||||
TRAILING_SPACES = /\s+$/
|
||||
|
||||
# Compound assignment tokens.
|
||||
COMPOUND_ASSIGN = [
|
||||
'-=', '+=', '/=', '*=', '%=', '||=', '&&=', '?=', '<<=', '>>=', '>>>=', '&=', '^=', '|='
|
||||
]
|
||||
|
||||
# Unary tokens.
|
||||
UNARY = ['!', '~', 'NEW', 'TYPEOF', 'DELETE', 'DO']
|
||||
|
||||
# Logical tokens.
|
||||
LOGIC = ['&&', '||', '&', '|', '^']
|
||||
|
||||
# Bit-shifting tokens.
|
||||
SHIFT = ['<<', '>>', '>>>']
|
||||
|
||||
# Comparison tokens.
|
||||
COMPARE = ['==', '!=', '<', '>', '<=', '>=']
|
||||
|
||||
# Mathematical tokens.
|
||||
MATH = ['*', '/', '%']
|
||||
|
||||
# Relational tokens that are negatable with `not` prefix.
|
||||
RELATION = ['IN', 'OF', 'INSTANCEOF']
|
||||
|
||||
# Boolean tokens.
|
||||
BOOL = ['TRUE', 'FALSE']
|
||||
|
||||
# Tokens which a regular expression will never immediately follow, but which
|
||||
# a division operator might.
|
||||
#
|
||||
# See: http://www.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions
|
||||
#
|
||||
# Our list is shorter, due to sans-parentheses method calls.
|
||||
NOT_REGEX = ['NUMBER', 'REGEX', 'BOOL', 'NULL', 'UNDEFINED', '++', '--', ']']
|
||||
|
||||
# If the previous token is not spaced, there are more preceding tokens that
|
||||
# force a division parse:
|
||||
NOT_SPACED_REGEX = NOT_REGEX.concat ')', '}', 'THIS', 'IDENTIFIER', 'STRING'
|
||||
|
||||
# Tokens which could legitimately be invoked or indexed. An opening
|
||||
# parentheses or bracket following these tokens will be recorded as the start
|
||||
# of a function invocation or indexing operation.
|
||||
CALLABLE = ['IDENTIFIER', 'STRING', 'REGEX', ')', ']', '}', '?', '::', '@', 'THIS', 'SUPER']
|
||||
INDEXABLE = CALLABLE.concat 'NUMBER', 'BOOL', 'NULL', 'UNDEFINED'
|
||||
|
||||
# Tokens that, when immediately preceding a `WHEN`, indicate that the `WHEN`
|
||||
# occurs at the start of a line. We disambiguate these from trailing whens to
|
||||
# avoid an ambiguity in the grammar.
|
||||
LINE_BREAK = ['INDENT', 'OUTDENT', 'TERMINATOR']
|
||||
256
test/fixtures/coffee/rack_application.coffee
vendored
Normal file
256
test/fixtures/coffee/rack_application.coffee
vendored
Normal file
@@ -0,0 +1,256 @@
|
||||
# The `RackApplication` class is responsible for managing a
|
||||
# [Nack](http://josh.github.com/nack/) pool for a given Rack
|
||||
# application. Incoming HTTP requests are dispatched to
|
||||
# `RackApplication` instances by an `HttpServer`, where they are
|
||||
# subsequently handled by a pool of Nack worker processes. By default,
|
||||
# Pow tells Nack to use a maximum of two worker processes per
|
||||
# application, but this can be overridden with the configuration's
|
||||
# `workers` option.
|
||||
#
|
||||
# Before creating the Nack pool, Pow executes the `.powrc` and
|
||||
# `.powenv` scripts if they're present in the application root,
|
||||
# captures their environment variables, and passes them along to the
|
||||
# Nack worker processes. This lets you modify your `RUBYOPT` to use
|
||||
# different Ruby options, for example.
|
||||
#
|
||||
# If [rvm](http://rvm.beginrescueend.com/) is installed and an
|
||||
# `.rvmrc` file is present in the application's root, Pow will load
|
||||
# both before creating the Nack pool. This makes it easy to run an
|
||||
# app with a specific version of Ruby.
|
||||
#
|
||||
# Nack workers remain running until they're killed, restarted (by
|
||||
# touching the `tmp/restart.txt` file in the application root), or
|
||||
# until the application has not served requests for the length of time
|
||||
# specified in the configuration's `timeout` option (15 minutes by
|
||||
# default).
|
||||
|
||||
async = require "async"
|
||||
fs = require "fs"
|
||||
nack = require "nack"
|
||||
|
||||
{bufferLines, pause, sourceScriptEnv} = require "./util"
|
||||
{join, exists, basename, resolve} = require "path"
|
||||
|
||||
module.exports = class RackApplication
|
||||
# Create a `RackApplication` for the given configuration and
|
||||
# root path. The application begins life in the uninitialized
|
||||
# state.
|
||||
constructor: (@configuration, @root, @firstHost) ->
|
||||
@logger = @configuration.getLogger join "apps", basename @root
|
||||
@readyCallbacks = []
|
||||
@quitCallbacks = []
|
||||
@statCallbacks = []
|
||||
|
||||
# Queue `callback` to be invoked when the application becomes ready,
|
||||
# then start the initialization process. If the application's state
|
||||
# is ready, the callback is invoked immediately.
|
||||
ready: (callback) ->
|
||||
if @state is "ready"
|
||||
callback()
|
||||
else
|
||||
@readyCallbacks.push callback
|
||||
@initialize()
|
||||
|
||||
# Tell the application to quit and queue `callback` to be invoked
|
||||
# when all workers have exited. If the application has already quit,
|
||||
# the callback is invoked immediately.
|
||||
quit: (callback) ->
|
||||
if @state
|
||||
@quitCallbacks.push callback if callback
|
||||
@terminate()
|
||||
else
|
||||
callback?()
|
||||
|
||||
# Stat `tmp/restart.txt` in the application root and invoke the
|
||||
# given callback with a single argument indicating whether or not
|
||||
# the file has been touched since the last call to
|
||||
# `queryRestartFile`.
|
||||
queryRestartFile: (callback) ->
|
||||
fs.stat join(@root, "tmp/restart.txt"), (err, stats) =>
|
||||
if err
|
||||
@mtime = null
|
||||
callback false
|
||||
else
|
||||
lastMtime = @mtime
|
||||
@mtime = stats.mtime.getTime()
|
||||
callback lastMtime isnt @mtime
|
||||
|
||||
# Check to see if `tmp/always_restart.txt` is present in the
|
||||
# application root, and set the pool's `runOnce` option
|
||||
# accordingly. Invoke `callback` when the existence check has
|
||||
# finished. (Multiple calls to this method are aggregated.)
|
||||
setPoolRunOnceFlag: (callback) ->
|
||||
unless @statCallbacks.length
|
||||
exists join(@root, "tmp/always_restart.txt"), (alwaysRestart) =>
|
||||
@pool.runOnce = alwaysRestart
|
||||
statCallback() for statCallback in @statCallbacks
|
||||
@statCallbacks = []
|
||||
|
||||
@statCallbacks.push callback
|
||||
|
||||
# Collect environment variables from `.powrc` and `.powenv`, in that
|
||||
# order, if present. The idea is that `.powrc` files can be checked
|
||||
# into a source code repository for global configuration, leaving
|
||||
# `.powenv` free for any necessary local overrides.
|
||||
loadScriptEnvironment: (env, callback) ->
|
||||
async.reduce [".powrc", ".envrc", ".powenv"], env, (env, filename, callback) =>
|
||||
exists script = join(@root, filename), (scriptExists) ->
|
||||
if scriptExists
|
||||
sourceScriptEnv script, env, callback
|
||||
else
|
||||
callback null, env
|
||||
, callback
|
||||
|
||||
# If `.rvmrc` and `$HOME/.rvm/scripts/rvm` are present, load rvm,
|
||||
# source `.rvmrc`, and invoke `callback` with the resulting
|
||||
# environment variables. If `.rvmrc` is present but rvm is not
|
||||
# installed, invoke `callback` without sourcing `.rvmrc`.
|
||||
# Before loading rvm, Pow invokes a helper script that shows a
|
||||
# deprecation notice if it has not yet been displayed.
|
||||
loadRvmEnvironment: (env, callback) ->
|
||||
exists script = join(@root, ".rvmrc"), (rvmrcExists) =>
|
||||
if rvmrcExists
|
||||
exists rvm = @configuration.rvmPath, (rvmExists) =>
|
||||
if rvmExists
|
||||
libexecPath = resolve "#{__dirname}/../libexec"
|
||||
before = """
|
||||
'#{libexecPath}/pow_rvm_deprecation_notice' '#{[@firstHost]}'
|
||||
source '#{rvm}' > /dev/null
|
||||
""".trim()
|
||||
sourceScriptEnv script, env, {before}, callback
|
||||
else
|
||||
callback null, env
|
||||
else
|
||||
callback null, env
|
||||
|
||||
# Stat `tmp/restart.txt` to cache its mtime, then load the
|
||||
# application's full environment from `.powrc`, `.powenv`, and
|
||||
# `.rvmrc`.
|
||||
loadEnvironment: (callback) ->
|
||||
@queryRestartFile =>
|
||||
@loadScriptEnvironment @configuration.env, (err, env) =>
|
||||
if err then callback err
|
||||
else @loadRvmEnvironment env, (err, env) =>
|
||||
if err then callback err
|
||||
else callback null, env
|
||||
|
||||
# Begin the initialization process if the application is in the
|
||||
# uninitialized state. (If the application is terminating, queue a
|
||||
# call to `initialize` after all workers have exited.)
|
||||
initialize: ->
|
||||
if @state
|
||||
if @state is "terminating"
|
||||
@quit => @initialize()
|
||||
return
|
||||
|
||||
@state = "initializing"
|
||||
|
||||
# Load the application's environment. If an error is raised or
|
||||
# either of the environment scripts exits with a non-zero status,
|
||||
# reset the application's state and log the error.
|
||||
@loadEnvironment (err, env) =>
|
||||
if err
|
||||
@state = null
|
||||
@logger.error err.message
|
||||
@logger.error "stdout: #{err.stdout}"
|
||||
@logger.error "stderr: #{err.stderr}"
|
||||
|
||||
# Set the application's state to ready. Then create the Nack
|
||||
# pool instance using the `workers` and `timeout` options from
|
||||
# the application's environment or the global configuration.
|
||||
else
|
||||
@state = "ready"
|
||||
|
||||
@pool = nack.createPool join(@root, "config.ru"),
|
||||
env: env
|
||||
size: env?.POW_WORKERS ? @configuration.workers
|
||||
idle: (env?.POW_TIMEOUT ? @configuration.timeout) * 1000
|
||||
|
||||
# Log the workers' stderr and stdout, and log each worker's
|
||||
# PID as it spawns and exits.
|
||||
bufferLines @pool.stdout, (line) => @logger.info line
|
||||
bufferLines @pool.stderr, (line) => @logger.warning line
|
||||
|
||||
@pool.on "worker:spawn", (process) =>
|
||||
@logger.debug "nack worker #{process.child.pid} spawned"
|
||||
|
||||
@pool.on "worker:exit", (process) =>
|
||||
@logger.debug "nack worker exited"
|
||||
|
||||
# Invoke and remove all queued callbacks, passing along the
|
||||
# error, if any.
|
||||
readyCallback err for readyCallback in @readyCallbacks
|
||||
@readyCallbacks = []
|
||||
|
||||
# Begin the termination process. (If the application is initializing,
|
||||
# wait until it is ready before shutting down.)
|
||||
terminate: ->
|
||||
if @state is "initializing"
|
||||
@ready => @terminate()
|
||||
|
||||
else if @state is "ready"
|
||||
@state = "terminating"
|
||||
|
||||
# Instruct all workers to exit. After the processes have
|
||||
# terminated, reset the application's state, then invoke and
|
||||
# remove all queued callbacks.
|
||||
@pool.quit =>
|
||||
@state = null
|
||||
@mtime = null
|
||||
@pool = null
|
||||
|
||||
quitCallback() for quitCallback in @quitCallbacks
|
||||
@quitCallbacks = []
|
||||
|
||||
# Handle an incoming HTTP request. Wait until the application is in
|
||||
# the ready state, restart the workers if necessary, then pass the
|
||||
# request along to the Nack pool. If the Nack worker raises an
|
||||
# exception handling the request, reset the application.
|
||||
handle: (req, res, next, callback) ->
|
||||
resume = pause req
|
||||
@ready (err) =>
|
||||
return next err if err
|
||||
@setPoolRunOnceFlag =>
|
||||
@restartIfNecessary =>
|
||||
req.proxyMetaVariables =
|
||||
SERVER_PORT: @configuration.dstPort.toString()
|
||||
try
|
||||
@pool.proxy req, res, (err) =>
|
||||
@quit() if err
|
||||
next err
|
||||
finally
|
||||
resume()
|
||||
callback?()
|
||||
|
||||
# Terminate the application, re-initialize it, and invoke the given
|
||||
# callback when the application's state becomes ready.
|
||||
restart: (callback) ->
|
||||
@quit =>
|
||||
@ready callback
|
||||
|
||||
# Restart the application if `tmp/restart.txt` has been touched
|
||||
# since the last call to this function.
|
||||
restartIfNecessary: (callback) ->
|
||||
@queryRestartFile (mtimeChanged) =>
|
||||
if mtimeChanged
|
||||
@restart callback
|
||||
else
|
||||
callback()
|
||||
|
||||
# Append RVM autoload boilerplate to the application's `.powrc`
|
||||
# file. This is called by the RVM deprecation notice mini-app.
|
||||
writeRvmBoilerplate: ->
|
||||
powrc = join @root, ".powrc"
|
||||
boilerplate = @constructor.rvmBoilerplate
|
||||
|
||||
fs.readFile powrc, "utf8", (err, contents) ->
|
||||
contents ?= ""
|
||||
if contents.indexOf(boilerplate) is -1
|
||||
fs.writeFile powrc, "#{boilerplate}\n#{contents}"
|
||||
|
||||
@rvmBoilerplate: """
|
||||
if [ -f "$rvm_path/scripts/rvm" ] && [ -f ".rvmrc" ]; then
|
||||
source "$rvm_path/scripts/rvm"
|
||||
source ".rvmrc"
|
||||
fi
|
||||
"""
|
||||
110
test/fixtures/coffee/xipd.coffee
vendored
Normal file
110
test/fixtures/coffee/xipd.coffee
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
dnsserver = require "dnsserver"
|
||||
|
||||
exports.Server = class Server extends dnsserver.Server
|
||||
NS_T_A = 1
|
||||
NS_T_NS = 2
|
||||
NS_T_CNAME = 5
|
||||
NS_T_SOA = 6
|
||||
NS_C_IN = 1
|
||||
NS_RCODE_NXDOMAIN = 3
|
||||
|
||||
constructor: (domain, @rootAddress) ->
|
||||
super
|
||||
@domain = domain.toLowerCase()
|
||||
@soa = createSOA @domain
|
||||
@on "request", @handleRequest
|
||||
|
||||
handleRequest: (req, res) =>
|
||||
question = req.question
|
||||
subdomain = @extractSubdomain question.name
|
||||
|
||||
if subdomain? and isARequest question
|
||||
res.addRR question.name, NS_T_A, NS_C_IN, 600, subdomain.getAddress()
|
||||
else if subdomain?.isEmpty() and isNSRequest question
|
||||
res.addRR question.name, NS_T_SOA, NS_C_IN, 600, @soa, true
|
||||
else
|
||||
res.header.rcode = NS_RCODE_NXDOMAIN
|
||||
|
||||
res.send()
|
||||
|
||||
extractSubdomain: (name) ->
|
||||
Subdomain.extract name, @domain, @rootAddress
|
||||
|
||||
isARequest = (question) ->
|
||||
question.type is NS_T_A and question.class is NS_C_IN
|
||||
|
||||
isNSRequest = (question) ->
|
||||
question.type is NS_T_NS and question.class is NS_C_IN
|
||||
|
||||
createSOA = (domain) ->
|
||||
mname = "ns-1.#{domain}"
|
||||
rname = "hostmaster.#{domain}"
|
||||
serial = parseInt new Date().getTime() / 1000
|
||||
refresh = 28800
|
||||
retry = 7200
|
||||
expire = 604800
|
||||
minimum = 3600
|
||||
dnsserver.createSOA mname, rname, serial, refresh, retry, expire, minimum
|
||||
|
||||
exports.createServer = (domain, address = "127.0.0.1") ->
|
||||
new Server domain, address
|
||||
|
||||
exports.Subdomain = class Subdomain
|
||||
@extract: (name, domain, address) ->
|
||||
return unless name
|
||||
name = name.toLowerCase()
|
||||
offset = name.length - domain.length
|
||||
|
||||
if domain is name.slice offset
|
||||
subdomain = if 0 >= offset then null else name.slice 0, offset - 1
|
||||
new constructor subdomain, address if constructor = @for subdomain
|
||||
|
||||
@for: (subdomain = "") ->
|
||||
if IPAddressSubdomain.pattern.test subdomain
|
||||
IPAddressSubdomain
|
||||
else if EncodedSubdomain.pattern.test subdomain
|
||||
EncodedSubdomain
|
||||
else
|
||||
Subdomain
|
||||
|
||||
constructor: (@subdomain, @address) ->
|
||||
@labels = subdomain?.split(".") ? []
|
||||
@length = @labels.length
|
||||
|
||||
isEmpty: ->
|
||||
@length is 0
|
||||
|
||||
getAddress: ->
|
||||
@address
|
||||
|
||||
class IPAddressSubdomain extends Subdomain
|
||||
@pattern = /// (^|\.)
|
||||
((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}
|
||||
(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
|
||||
$ ///
|
||||
|
||||
getAddress: ->
|
||||
@labels.slice(-4).join "."
|
||||
|
||||
class EncodedSubdomain extends Subdomain
|
||||
@pattern = /(^|\.)[a-z0-9]{1,7}$/
|
||||
|
||||
getAddress: ->
|
||||
decode @labels[@length - 1]
|
||||
|
||||
exports.encode = encode = (ip) ->
|
||||
value = 0
|
||||
for byte, index in ip.split "."
|
||||
value += parseInt(byte, 10) << (index * 8)
|
||||
(value >>> 0).toString 36
|
||||
|
||||
PATTERN = /^[a-z0-9]{1,7}$/
|
||||
|
||||
exports.decode = decode = (string) ->
|
||||
return unless PATTERN.test string
|
||||
value = parseInt string, 36
|
||||
ip = []
|
||||
for i in [1..4]
|
||||
ip.push value & 0xFF
|
||||
value >>= 8
|
||||
ip.join "."
|
||||
81
test/fixtures/cpp/env.cpp
vendored
Normal file
81
test/fixtures/cpp/env.cpp
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
/*
|
||||
This file is part of the PhantomJS project from Ofi Labs.
|
||||
|
||||
Copyright (C) 2012 execjosh, http://execjosh.blogspot.com
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include "env.h"
|
||||
|
||||
#include <QCoreApplication>
|
||||
#include <QString>
|
||||
#include <QVariantMap>
|
||||
|
||||
static Env *env_instance = (Env *)NULL;
|
||||
|
||||
Env *Env::instance()
|
||||
{
|
||||
if ((Env *)NULL == env_instance)
|
||||
env_instance = new Env();
|
||||
|
||||
return env_instance;
|
||||
}
|
||||
|
||||
Env::Env()
|
||||
: QObject(QCoreApplication::instance())
|
||||
{
|
||||
}
|
||||
|
||||
// public:
|
||||
|
||||
void Env::parse(const char **envp)
|
||||
{
|
||||
const char **env = (const char **)NULL;
|
||||
QString envvar, name, value;
|
||||
int indexOfEquals;
|
||||
// Loop for each of the <NAME>=<VALUE> pairs and split them into a map
|
||||
for (env = envp; *env != (const char *)NULL; env++) {
|
||||
envvar = QString(*env);
|
||||
indexOfEquals = envvar.indexOf('=');
|
||||
if (0 >= indexOfEquals) {
|
||||
// Should never happen because names cannot contain "=" and cannot
|
||||
// be empty. If it does happen, then just ignore this record.
|
||||
// See: http://pubs.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap08.html
|
||||
continue;
|
||||
}
|
||||
// Extract name and value (if it exists) from envvar
|
||||
// NOTE:
|
||||
// QString::mid() will gracefully return an empty QString when the
|
||||
// specified position index is >= the length() of the string
|
||||
name = envvar.left(indexOfEquals);
|
||||
value = envvar.mid(indexOfEquals + 1);
|
||||
m_map.insert(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
QVariantMap Env::asVariantMap() const
|
||||
{
|
||||
return m_map;
|
||||
}
|
||||
52
test/fixtures/cpp/env.h
vendored
Normal file
52
test/fixtures/cpp/env.h
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
/*
|
||||
This file is part of the PhantomJS project from Ofi Labs.
|
||||
|
||||
Copyright (C) 2012 execjosh, http://execjosh.blogspot.com
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#ifndef ENV_H
|
||||
#define ENV_H
|
||||
|
||||
#include <QObject>
|
||||
#include <QVariantMap>
|
||||
|
||||
class Env : public QObject
|
||||
{
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
static Env *instance();
|
||||
|
||||
void parse(const char ** envp);
|
||||
QVariantMap asVariantMap() const;
|
||||
|
||||
private:
|
||||
Env();
|
||||
|
||||
QVariantMap m_map;
|
||||
};
|
||||
|
||||
#endif // ENV_H
|
||||
382
test/fixtures/cpp/key.cpp
vendored
Normal file
382
test/fixtures/cpp/key.cpp
vendored
Normal file
@@ -0,0 +1,382 @@
|
||||
// Copyright (c) 2009-2012 The Bitcoin developers
|
||||
// Distributed under the MIT/X11 software license, see the accompanying
|
||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
#include <map>
|
||||
|
||||
#include <openssl/ecdsa.h>
|
||||
#include <openssl/obj_mac.h>
|
||||
|
||||
#include "key.h"
|
||||
|
||||
// Generate a private key from just the secret parameter
|
||||
int EC_KEY_regenerate_key(EC_KEY *eckey, BIGNUM *priv_key)
|
||||
{
|
||||
int ok = 0;
|
||||
BN_CTX *ctx = NULL;
|
||||
EC_POINT *pub_key = NULL;
|
||||
|
||||
if (!eckey) return 0;
|
||||
|
||||
const EC_GROUP *group = EC_KEY_get0_group(eckey);
|
||||
|
||||
if ((ctx = BN_CTX_new()) == NULL)
|
||||
goto err;
|
||||
|
||||
pub_key = EC_POINT_new(group);
|
||||
|
||||
if (pub_key == NULL)
|
||||
goto err;
|
||||
|
||||
if (!EC_POINT_mul(group, pub_key, priv_key, NULL, NULL, ctx))
|
||||
goto err;
|
||||
|
||||
EC_KEY_set_private_key(eckey,priv_key);
|
||||
EC_KEY_set_public_key(eckey,pub_key);
|
||||
|
||||
ok = 1;
|
||||
|
||||
err:
|
||||
|
||||
if (pub_key)
|
||||
EC_POINT_free(pub_key);
|
||||
if (ctx != NULL)
|
||||
BN_CTX_free(ctx);
|
||||
|
||||
return(ok);
|
||||
}
|
||||
|
||||
// Perform ECDSA key recovery (see SEC1 4.1.6) for curves over (mod p)-fields
|
||||
// recid selects which key is recovered
|
||||
// if check is nonzero, additional checks are performed
|
||||
int ECDSA_SIG_recover_key_GFp(EC_KEY *eckey, ECDSA_SIG *ecsig, const unsigned char *msg, int msglen, int recid, int check)
|
||||
{
|
||||
if (!eckey) return 0;
|
||||
|
||||
int ret = 0;
|
||||
BN_CTX *ctx = NULL;
|
||||
|
||||
BIGNUM *x = NULL;
|
||||
BIGNUM *e = NULL;
|
||||
BIGNUM *order = NULL;
|
||||
BIGNUM *sor = NULL;
|
||||
BIGNUM *eor = NULL;
|
||||
BIGNUM *field = NULL;
|
||||
EC_POINT *R = NULL;
|
||||
EC_POINT *O = NULL;
|
||||
EC_POINT *Q = NULL;
|
||||
BIGNUM *rr = NULL;
|
||||
BIGNUM *zero = NULL;
|
||||
int n = 0;
|
||||
int i = recid / 2;
|
||||
|
||||
const EC_GROUP *group = EC_KEY_get0_group(eckey);
|
||||
if ((ctx = BN_CTX_new()) == NULL) { ret = -1; goto err; }
|
||||
BN_CTX_start(ctx);
|
||||
order = BN_CTX_get(ctx);
|
||||
if (!EC_GROUP_get_order(group, order, ctx)) { ret = -2; goto err; }
|
||||
x = BN_CTX_get(ctx);
|
||||
if (!BN_copy(x, order)) { ret=-1; goto err; }
|
||||
if (!BN_mul_word(x, i)) { ret=-1; goto err; }
|
||||
if (!BN_add(x, x, ecsig->r)) { ret=-1; goto err; }
|
||||
field = BN_CTX_get(ctx);
|
||||
if (!EC_GROUP_get_curve_GFp(group, field, NULL, NULL, ctx)) { ret=-2; goto err; }
|
||||
if (BN_cmp(x, field) >= 0) { ret=0; goto err; }
|
||||
if ((R = EC_POINT_new(group)) == NULL) { ret = -2; goto err; }
|
||||
if (!EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)) { ret=0; goto err; }
|
||||
if (check)
|
||||
{
|
||||
if ((O = EC_POINT_new(group)) == NULL) { ret = -2; goto err; }
|
||||
if (!EC_POINT_mul(group, O, NULL, R, order, ctx)) { ret=-2; goto err; }
|
||||
if (!EC_POINT_is_at_infinity(group, O)) { ret = 0; goto err; }
|
||||
}
|
||||
if ((Q = EC_POINT_new(group)) == NULL) { ret = -2; goto err; }
|
||||
n = EC_GROUP_get_degree(group);
|
||||
e = BN_CTX_get(ctx);
|
||||
if (!BN_bin2bn(msg, msglen, e)) { ret=-1; goto err; }
|
||||
if (8*msglen > n) BN_rshift(e, e, 8-(n & 7));
|
||||
zero = BN_CTX_get(ctx);
|
||||
if (!BN_zero(zero)) { ret=-1; goto err; }
|
||||
if (!BN_mod_sub(e, zero, e, order, ctx)) { ret=-1; goto err; }
|
||||
rr = BN_CTX_get(ctx);
|
||||
if (!BN_mod_inverse(rr, ecsig->r, order, ctx)) { ret=-1; goto err; }
|
||||
sor = BN_CTX_get(ctx);
|
||||
if (!BN_mod_mul(sor, ecsig->s, rr, order, ctx)) { ret=-1; goto err; }
|
||||
eor = BN_CTX_get(ctx);
|
||||
if (!BN_mod_mul(eor, e, rr, order, ctx)) { ret=-1; goto err; }
|
||||
if (!EC_POINT_mul(group, Q, eor, R, sor, ctx)) { ret=-2; goto err; }
|
||||
if (!EC_KEY_set_public_key(eckey, Q)) { ret=-2; goto err; }
|
||||
|
||||
ret = 1;
|
||||
|
||||
err:
|
||||
if (ctx) {
|
||||
BN_CTX_end(ctx);
|
||||
BN_CTX_free(ctx);
|
||||
}
|
||||
if (R != NULL) EC_POINT_free(R);
|
||||
if (O != NULL) EC_POINT_free(O);
|
||||
if (Q != NULL) EC_POINT_free(Q);
|
||||
return ret;
|
||||
}
|
||||
|
||||
void CKey::SetCompressedPubKey()
|
||||
{
|
||||
EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED);
|
||||
fCompressedPubKey = true;
|
||||
}
|
||||
|
||||
void CKey::Reset()
|
||||
{
|
||||
fCompressedPubKey = false;
|
||||
pkey = EC_KEY_new_by_curve_name(NID_secp256k1);
|
||||
if (pkey == NULL)
|
||||
throw key_error("CKey::CKey() : EC_KEY_new_by_curve_name failed");
|
||||
fSet = false;
|
||||
}
|
||||
|
||||
CKey::CKey()
|
||||
{
|
||||
Reset();
|
||||
}
|
||||
|
||||
CKey::CKey(const CKey& b)
|
||||
{
|
||||
pkey = EC_KEY_dup(b.pkey);
|
||||
if (pkey == NULL)
|
||||
throw key_error("CKey::CKey(const CKey&) : EC_KEY_dup failed");
|
||||
fSet = b.fSet;
|
||||
}
|
||||
|
||||
CKey& CKey::operator=(const CKey& b)
|
||||
{
|
||||
if (!EC_KEY_copy(pkey, b.pkey))
|
||||
throw key_error("CKey::operator=(const CKey&) : EC_KEY_copy failed");
|
||||
fSet = b.fSet;
|
||||
return (*this);
|
||||
}
|
||||
|
||||
CKey::~CKey()
|
||||
{
|
||||
EC_KEY_free(pkey);
|
||||
}
|
||||
|
||||
bool CKey::IsNull() const
|
||||
{
|
||||
return !fSet;
|
||||
}
|
||||
|
||||
bool CKey::IsCompressed() const
|
||||
{
|
||||
return fCompressedPubKey;
|
||||
}
|
||||
|
||||
void CKey::MakeNewKey(bool fCompressed)
|
||||
{
|
||||
if (!EC_KEY_generate_key(pkey))
|
||||
throw key_error("CKey::MakeNewKey() : EC_KEY_generate_key failed");
|
||||
if (fCompressed)
|
||||
SetCompressedPubKey();
|
||||
fSet = true;
|
||||
}
|
||||
|
||||
bool CKey::SetPrivKey(const CPrivKey& vchPrivKey)
|
||||
{
|
||||
const unsigned char* pbegin = &vchPrivKey[0];
|
||||
if (!d2i_ECPrivateKey(&pkey, &pbegin, vchPrivKey.size()))
|
||||
return false;
|
||||
fSet = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool CKey::SetSecret(const CSecret& vchSecret, bool fCompressed)
|
||||
{
|
||||
EC_KEY_free(pkey);
|
||||
pkey = EC_KEY_new_by_curve_name(NID_secp256k1);
|
||||
if (pkey == NULL)
|
||||
throw key_error("CKey::SetSecret() : EC_KEY_new_by_curve_name failed");
|
||||
if (vchSecret.size() != 32)
|
||||
throw key_error("CKey::SetSecret() : secret must be 32 bytes");
|
||||
BIGNUM *bn = BN_bin2bn(&vchSecret[0],32,BN_new());
|
||||
if (bn == NULL)
|
||||
throw key_error("CKey::SetSecret() : BN_bin2bn failed");
|
||||
if (!EC_KEY_regenerate_key(pkey,bn))
|
||||
{
|
||||
BN_clear_free(bn);
|
||||
throw key_error("CKey::SetSecret() : EC_KEY_regenerate_key failed");
|
||||
}
|
||||
BN_clear_free(bn);
|
||||
fSet = true;
|
||||
if (fCompressed || fCompressedPubKey)
|
||||
SetCompressedPubKey();
|
||||
return true;
|
||||
}
|
||||
|
||||
CSecret CKey::GetSecret(bool &fCompressed) const
|
||||
{
|
||||
CSecret vchRet;
|
||||
vchRet.resize(32);
|
||||
const BIGNUM *bn = EC_KEY_get0_private_key(pkey);
|
||||
int nBytes = BN_num_bytes(bn);
|
||||
if (bn == NULL)
|
||||
throw key_error("CKey::GetSecret() : EC_KEY_get0_private_key failed");
|
||||
int n=BN_bn2bin(bn,&vchRet[32 - nBytes]);
|
||||
if (n != nBytes)
|
||||
throw key_error("CKey::GetSecret(): BN_bn2bin failed");
|
||||
fCompressed = fCompressedPubKey;
|
||||
return vchRet;
|
||||
}
|
||||
|
||||
CPrivKey CKey::GetPrivKey() const
|
||||
{
|
||||
int nSize = i2d_ECPrivateKey(pkey, NULL);
|
||||
if (!nSize)
|
||||
throw key_error("CKey::GetPrivKey() : i2d_ECPrivateKey failed");
|
||||
CPrivKey vchPrivKey(nSize, 0);
|
||||
unsigned char* pbegin = &vchPrivKey[0];
|
||||
if (i2d_ECPrivateKey(pkey, &pbegin) != nSize)
|
||||
throw key_error("CKey::GetPrivKey() : i2d_ECPrivateKey returned unexpected size");
|
||||
return vchPrivKey;
|
||||
}
|
||||
|
||||
bool CKey::SetPubKey(const CPubKey& vchPubKey)
|
||||
{
|
||||
const unsigned char* pbegin = &vchPubKey.vchPubKey[0];
|
||||
if (!o2i_ECPublicKey(&pkey, &pbegin, vchPubKey.vchPubKey.size()))
|
||||
return false;
|
||||
fSet = true;
|
||||
if (vchPubKey.vchPubKey.size() == 33)
|
||||
SetCompressedPubKey();
|
||||
return true;
|
||||
}
|
||||
|
||||
CPubKey CKey::GetPubKey() const
|
||||
{
|
||||
int nSize = i2o_ECPublicKey(pkey, NULL);
|
||||
if (!nSize)
|
||||
throw key_error("CKey::GetPubKey() : i2o_ECPublicKey failed");
|
||||
std::vector<unsigned char> vchPubKey(nSize, 0);
|
||||
unsigned char* pbegin = &vchPubKey[0];
|
||||
if (i2o_ECPublicKey(pkey, &pbegin) != nSize)
|
||||
throw key_error("CKey::GetPubKey() : i2o_ECPublicKey returned unexpected size");
|
||||
return CPubKey(vchPubKey);
|
||||
}
|
||||
|
||||
bool CKey::Sign(uint256 hash, std::vector<unsigned char>& vchSig)
|
||||
{
|
||||
unsigned int nSize = ECDSA_size(pkey);
|
||||
vchSig.resize(nSize); // Make sure it is big enough
|
||||
if (!ECDSA_sign(0, (unsigned char*)&hash, sizeof(hash), &vchSig[0], &nSize, pkey))
|
||||
{
|
||||
vchSig.clear();
|
||||
return false;
|
||||
}
|
||||
vchSig.resize(nSize); // Shrink to fit actual size
|
||||
return true;
|
||||
}
|
||||
|
||||
// create a compact signature (65 bytes), which allows reconstructing the used public key
|
||||
// The format is one header byte, followed by two times 32 bytes for the serialized r and s values.
|
||||
// The header byte: 0x1B = first key with even y, 0x1C = first key with odd y,
|
||||
// 0x1D = second key with even y, 0x1E = second key with odd y
|
||||
bool CKey::SignCompact(uint256 hash, std::vector<unsigned char>& vchSig)
|
||||
{
|
||||
bool fOk = false;
|
||||
ECDSA_SIG *sig = ECDSA_do_sign((unsigned char*)&hash, sizeof(hash), pkey);
|
||||
if (sig==NULL)
|
||||
return false;
|
||||
vchSig.clear();
|
||||
vchSig.resize(65,0);
|
||||
int nBitsR = BN_num_bits(sig->r);
|
||||
int nBitsS = BN_num_bits(sig->s);
|
||||
if (nBitsR <= 256 && nBitsS <= 256)
|
||||
{
|
||||
int nRecId = -1;
|
||||
for (int i=0; i<4; i++)
|
||||
{
|
||||
CKey keyRec;
|
||||
keyRec.fSet = true;
|
||||
if (fCompressedPubKey)
|
||||
keyRec.SetCompressedPubKey();
|
||||
if (ECDSA_SIG_recover_key_GFp(keyRec.pkey, sig, (unsigned char*)&hash, sizeof(hash), i, 1) == 1)
|
||||
if (keyRec.GetPubKey() == this->GetPubKey())
|
||||
{
|
||||
nRecId = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (nRecId == -1)
|
||||
throw key_error("CKey::SignCompact() : unable to construct recoverable key");
|
||||
|
||||
vchSig[0] = nRecId+27+(fCompressedPubKey ? 4 : 0);
|
||||
BN_bn2bin(sig->r,&vchSig[33-(nBitsR+7)/8]);
|
||||
BN_bn2bin(sig->s,&vchSig[65-(nBitsS+7)/8]);
|
||||
fOk = true;
|
||||
}
|
||||
ECDSA_SIG_free(sig);
|
||||
return fOk;
|
||||
}
|
||||
|
||||
// reconstruct public key from a compact signature
|
||||
// This is only slightly more CPU intensive than just verifying it.
|
||||
// If this function succeeds, the recovered public key is guaranteed to be valid
|
||||
// (the signature is a valid signature of the given data for that key)
|
||||
bool CKey::SetCompactSignature(uint256 hash, const std::vector<unsigned char>& vchSig)
|
||||
{
|
||||
if (vchSig.size() != 65)
|
||||
return false;
|
||||
int nV = vchSig[0];
|
||||
if (nV<27 || nV>=35)
|
||||
return false;
|
||||
ECDSA_SIG *sig = ECDSA_SIG_new();
|
||||
BN_bin2bn(&vchSig[1],32,sig->r);
|
||||
BN_bin2bn(&vchSig[33],32,sig->s);
|
||||
|
||||
EC_KEY_free(pkey);
|
||||
pkey = EC_KEY_new_by_curve_name(NID_secp256k1);
|
||||
if (nV >= 31)
|
||||
{
|
||||
SetCompressedPubKey();
|
||||
nV -= 4;
|
||||
}
|
||||
if (ECDSA_SIG_recover_key_GFp(pkey, sig, (unsigned char*)&hash, sizeof(hash), nV - 27, 0) == 1)
|
||||
{
|
||||
fSet = true;
|
||||
ECDSA_SIG_free(sig);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool CKey::Verify(uint256 hash, const std::vector<unsigned char>& vchSig)
|
||||
{
|
||||
// -1 = error, 0 = bad sig, 1 = good
|
||||
if (ECDSA_verify(0, (unsigned char*)&hash, sizeof(hash), &vchSig[0], vchSig.size(), pkey) != 1)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool CKey::VerifyCompact(uint256 hash, const std::vector<unsigned char>& vchSig)
|
||||
{
|
||||
CKey key;
|
||||
if (!key.SetCompactSignature(hash, vchSig))
|
||||
return false;
|
||||
if (GetPubKey() != key.GetPubKey())
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool CKey::IsValid()
|
||||
{
|
||||
if (!fSet)
|
||||
return false;
|
||||
|
||||
bool fCompr;
|
||||
CSecret secret = GetSecret(fCompr);
|
||||
CKey key2;
|
||||
key2.SetSecret(secret, fCompr);
|
||||
return GetPubKey() == key2.GetPubKey();
|
||||
}
|
||||
162
test/fixtures/cpp/key.h
vendored
Normal file
162
test/fixtures/cpp/key.h
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
// Copyright (c) 2009-2010 Satoshi Nakamoto
|
||||
// Copyright (c) 2009-2012 The Bitcoin developers
|
||||
// Distributed under the MIT/X11 software license, see the accompanying
|
||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
#ifndef BITCOIN_KEY_H
|
||||
#define BITCOIN_KEY_H
|
||||
|
||||
#include <stdexcept>
|
||||
#include <vector>
|
||||
|
||||
#include "allocators.h"
|
||||
#include "serialize.h"
|
||||
#include "uint256.h"
|
||||
#include "util.h"
|
||||
|
||||
#include <openssl/ec.h> // for EC_KEY definition
|
||||
|
||||
// secp160k1
|
||||
// const unsigned int PRIVATE_KEY_SIZE = 192;
|
||||
// const unsigned int PUBLIC_KEY_SIZE = 41;
|
||||
// const unsigned int SIGNATURE_SIZE = 48;
|
||||
//
|
||||
// secp192k1
|
||||
// const unsigned int PRIVATE_KEY_SIZE = 222;
|
||||
// const unsigned int PUBLIC_KEY_SIZE = 49;
|
||||
// const unsigned int SIGNATURE_SIZE = 57;
|
||||
//
|
||||
// secp224k1
|
||||
// const unsigned int PRIVATE_KEY_SIZE = 250;
|
||||
// const unsigned int PUBLIC_KEY_SIZE = 57;
|
||||
// const unsigned int SIGNATURE_SIZE = 66;
|
||||
//
|
||||
// secp256k1:
|
||||
// const unsigned int PRIVATE_KEY_SIZE = 279;
|
||||
// const unsigned int PUBLIC_KEY_SIZE = 65;
|
||||
// const unsigned int SIGNATURE_SIZE = 72;
|
||||
//
|
||||
// see www.keylength.com
|
||||
// script supports up to 75 for single byte push
|
||||
|
||||
class key_error : public std::runtime_error
|
||||
{
|
||||
public:
|
||||
explicit key_error(const std::string& str) : std::runtime_error(str) {}
|
||||
};
|
||||
|
||||
/** A reference to a CKey: the Hash160 of its serialized public key */
|
||||
class CKeyID : public uint160
|
||||
{
|
||||
public:
|
||||
CKeyID() : uint160(0) { }
|
||||
CKeyID(const uint160 &in) : uint160(in) { }
|
||||
};
|
||||
|
||||
/** A reference to a CScript: the Hash160 of its serialization (see script.h) */
|
||||
class CScriptID : public uint160
|
||||
{
|
||||
public:
|
||||
CScriptID() : uint160(0) { }
|
||||
CScriptID(const uint160 &in) : uint160(in) { }
|
||||
};
|
||||
|
||||
/** An encapsulated public key. */
|
||||
class CPubKey {
|
||||
private:
|
||||
std::vector<unsigned char> vchPubKey;
|
||||
friend class CKey;
|
||||
|
||||
public:
|
||||
CPubKey() { }
|
||||
CPubKey(const std::vector<unsigned char> &vchPubKeyIn) : vchPubKey(vchPubKeyIn) { }
|
||||
friend bool operator==(const CPubKey &a, const CPubKey &b) { return a.vchPubKey == b.vchPubKey; }
|
||||
friend bool operator!=(const CPubKey &a, const CPubKey &b) { return a.vchPubKey != b.vchPubKey; }
|
||||
friend bool operator<(const CPubKey &a, const CPubKey &b) { return a.vchPubKey < b.vchPubKey; }
|
||||
|
||||
IMPLEMENT_SERIALIZE(
|
||||
READWRITE(vchPubKey);
|
||||
)
|
||||
|
||||
CKeyID GetID() const {
|
||||
return CKeyID(Hash160(vchPubKey));
|
||||
}
|
||||
|
||||
uint256 GetHash() const {
|
||||
return Hash(vchPubKey.begin(), vchPubKey.end());
|
||||
}
|
||||
|
||||
bool IsValid() const {
|
||||
return vchPubKey.size() == 33 || vchPubKey.size() == 65;
|
||||
}
|
||||
|
||||
bool IsCompressed() const {
|
||||
return vchPubKey.size() == 33;
|
||||
}
|
||||
|
||||
std::vector<unsigned char> Raw() const {
|
||||
return vchPubKey;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// secure_allocator is defined in serialize.h
|
||||
// CPrivKey is a serialized private key, with all parameters included (279 bytes)
|
||||
typedef std::vector<unsigned char, secure_allocator<unsigned char> > CPrivKey;
|
||||
// CSecret is a serialization of just the secret parameter (32 bytes)
|
||||
typedef std::vector<unsigned char, secure_allocator<unsigned char> > CSecret;
|
||||
|
||||
/** An encapsulated OpenSSL Elliptic Curve key (public and/or private) */
|
||||
class CKey
|
||||
{
|
||||
protected:
|
||||
EC_KEY* pkey;
|
||||
bool fSet;
|
||||
bool fCompressedPubKey;
|
||||
|
||||
void SetCompressedPubKey();
|
||||
|
||||
public:
|
||||
|
||||
void Reset();
|
||||
|
||||
CKey();
|
||||
CKey(const CKey& b);
|
||||
|
||||
CKey& operator=(const CKey& b);
|
||||
|
||||
~CKey();
|
||||
|
||||
bool IsNull() const;
|
||||
bool IsCompressed() const;
|
||||
|
||||
void MakeNewKey(bool fCompressed);
|
||||
bool SetPrivKey(const CPrivKey& vchPrivKey);
|
||||
bool SetSecret(const CSecret& vchSecret, bool fCompressed = false);
|
||||
CSecret GetSecret(bool &fCompressed) const;
|
||||
CPrivKey GetPrivKey() const;
|
||||
bool SetPubKey(const CPubKey& vchPubKey);
|
||||
CPubKey GetPubKey() const;
|
||||
|
||||
bool Sign(uint256 hash, std::vector<unsigned char>& vchSig);
|
||||
|
||||
// create a compact signature (65 bytes), which allows reconstructing the used public key
|
||||
// The format is one header byte, followed by two times 32 bytes for the serialized r and s values.
|
||||
// The header byte: 0x1B = first key with even y, 0x1C = first key with odd y,
|
||||
// 0x1D = second key with even y, 0x1E = second key with odd y
|
||||
bool SignCompact(uint256 hash, std::vector<unsigned char>& vchSig);
|
||||
|
||||
// reconstruct public key from a compact signature
|
||||
// This is only slightly more CPU intensive than just verifying it.
|
||||
// If this function succeeds, the recovered public key is guaranteed to be valid
|
||||
// (the signature is a valid signature of the given data for that key)
|
||||
bool SetCompactSignature(uint256 hash, const std::vector<unsigned char>& vchSig);
|
||||
|
||||
bool Verify(uint256 hash, const std::vector<unsigned char>& vchSig);
|
||||
|
||||
// Verify a compact signature
|
||||
bool VerifyCompact(uint256 hash, const std::vector<unsigned char>& vchSig);
|
||||
|
||||
bool IsValid();
|
||||
};
|
||||
|
||||
#endif
|
||||
74
test/fixtures/cpp/main.cpp
vendored
Normal file
74
test/fixtures/cpp/main.cpp
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
/*
|
||||
This file is part of the PhantomJS project from Ofi Labs.
|
||||
|
||||
Copyright (C) 2011 Ariya Hidayat <ariya.hidayat@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#include "consts.h"
|
||||
#include "utils.h"
|
||||
#include "env.h"
|
||||
#include "phantom.h"
|
||||
|
||||
#ifdef Q_OS_LINUX
|
||||
#include "client/linux/handler/exception_handler.h"
|
||||
#endif
|
||||
|
||||
#include <QApplication>
|
||||
|
||||
#if QT_VERSION != QT_VERSION_CHECK(4, 8, 0)
|
||||
#error Something is wrong with the setup. Please report to the mailing list!
|
||||
#endif
|
||||
|
||||
int main(int argc, char** argv, const char** envp)
|
||||
{
|
||||
#ifdef Q_OS_LINUX
|
||||
google_breakpad::ExceptionHandler eh("/tmp", NULL, Utils::exceptionHandler, NULL, true);
|
||||
#endif
|
||||
|
||||
// Registering an alternative Message Handler
|
||||
qInstallMsgHandler(Utils::messageHandler);
|
||||
|
||||
QApplication app(argc, argv);
|
||||
|
||||
#ifdef STATIC_BUILD
|
||||
Q_INIT_RESOURCE(WebKit);
|
||||
Q_INIT_RESOURCE(InspectorBackendStub);
|
||||
#endif
|
||||
|
||||
app.setWindowIcon(QIcon(":/phantomjs-icon.png"));
|
||||
app.setApplicationName("PhantomJS");
|
||||
app.setOrganizationName("Ofi Labs");
|
||||
app.setOrganizationDomain("www.ofilabs.com");
|
||||
app.setApplicationVersion(PHANTOMJS_VERSION_STRING);
|
||||
|
||||
Env::instance()->parse(envp);
|
||||
|
||||
Phantom phantom;
|
||||
if (phantom.execute()) {
|
||||
app.exec();
|
||||
}
|
||||
return phantom.returnValue();
|
||||
}
|
||||
1088
test/fixtures/cpp/scanner.cc
vendored
Normal file
1088
test/fixtures/cpp/scanner.cc
vendored
Normal file
File diff suppressed because it is too large
Load Diff
576
test/fixtures/cpp/scanner.h
vendored
Normal file
576
test/fixtures/cpp/scanner.h
vendored
Normal file
@@ -0,0 +1,576 @@
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following
|
||||
// disclaimer in the documentation and/or other materials provided
|
||||
// with the distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived
|
||||
// from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Features shared by parsing and pre-parsing scanners.
|
||||
|
||||
#ifndef V8_SCANNER_H_
|
||||
#define V8_SCANNER_H_
|
||||
|
||||
#include "allocation.h"
|
||||
#include "char-predicates.h"
|
||||
#include "checks.h"
|
||||
#include "globals.h"
|
||||
#include "token.h"
|
||||
#include "unicode-inl.h"
|
||||
#include "utils.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
|
||||
// General collection of (multi-)bit-flags that can be passed to scanners and
|
||||
// parsers to signify their (initial) mode of operation.
|
||||
enum ParsingFlags {
|
||||
kNoParsingFlags = 0,
|
||||
// Embed LanguageMode values in parsing flags, i.e., equivalent to:
|
||||
// CLASSIC_MODE = 0,
|
||||
// STRICT_MODE,
|
||||
// EXTENDED_MODE,
|
||||
kLanguageModeMask = 0x03,
|
||||
kAllowLazy = 0x04,
|
||||
kAllowNativesSyntax = 0x08,
|
||||
kAllowModules = 0x10
|
||||
};
|
||||
|
||||
STATIC_ASSERT((kLanguageModeMask & CLASSIC_MODE) == CLASSIC_MODE);
|
||||
STATIC_ASSERT((kLanguageModeMask & STRICT_MODE) == STRICT_MODE);
|
||||
STATIC_ASSERT((kLanguageModeMask & EXTENDED_MODE) == EXTENDED_MODE);
|
||||
|
||||
|
||||
// Returns the value (0 .. 15) of a hexadecimal character c.
|
||||
// If c is not a legal hexadecimal character, returns a value < 0.
|
||||
inline int HexValue(uc32 c) {
|
||||
c -= '0';
|
||||
if (static_cast<unsigned>(c) <= 9) return c;
|
||||
c = (c | 0x20) - ('a' - '0'); // detect 0x11..0x16 and 0x31..0x36.
|
||||
if (static_cast<unsigned>(c) <= 5) return c + 10;
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
// Buffered stream of UTF-16 code units, using an internal UTF-16 buffer.
|
||||
// A code unit is a 16 bit value representing either a 16 bit code point
|
||||
// or one part of a surrogate pair that make a single 21 bit code point.
|
||||
|
||||
class Utf16CharacterStream {
|
||||
public:
|
||||
Utf16CharacterStream() : pos_(0) { }
|
||||
virtual ~Utf16CharacterStream() { }
|
||||
|
||||
// Returns and advances past the next UTF-16 code unit in the input
|
||||
// stream. If there are no more code units, it returns a negative
|
||||
// value.
|
||||
inline uc32 Advance() {
|
||||
if (buffer_cursor_ < buffer_end_ || ReadBlock()) {
|
||||
pos_++;
|
||||
return static_cast<uc32>(*(buffer_cursor_++));
|
||||
}
|
||||
// Note: currently the following increment is necessary to avoid a
|
||||
// parser problem! The scanner treats the final kEndOfInput as
|
||||
// a code unit with a position, and does math relative to that
|
||||
// position.
|
||||
pos_++;
|
||||
|
||||
return kEndOfInput;
|
||||
}
|
||||
|
||||
// Return the current position in the code unit stream.
|
||||
// Starts at zero.
|
||||
inline unsigned pos() const { return pos_; }
|
||||
|
||||
// Skips forward past the next code_unit_count UTF-16 code units
|
||||
// in the input, or until the end of input if that comes sooner.
|
||||
// Returns the number of code units actually skipped. If less
|
||||
// than code_unit_count,
|
||||
inline unsigned SeekForward(unsigned code_unit_count) {
|
||||
unsigned buffered_chars =
|
||||
static_cast<unsigned>(buffer_end_ - buffer_cursor_);
|
||||
if (code_unit_count <= buffered_chars) {
|
||||
buffer_cursor_ += code_unit_count;
|
||||
pos_ += code_unit_count;
|
||||
return code_unit_count;
|
||||
}
|
||||
return SlowSeekForward(code_unit_count);
|
||||
}
|
||||
|
||||
// Pushes back the most recently read UTF-16 code unit (or negative
|
||||
// value if at end of input), i.e., the value returned by the most recent
|
||||
// call to Advance.
|
||||
// Must not be used right after calling SeekForward.
|
||||
virtual void PushBack(int32_t code_unit) = 0;
|
||||
|
||||
protected:
|
||||
static const uc32 kEndOfInput = -1;
|
||||
|
||||
// Ensures that the buffer_cursor_ points to the code_unit at
|
||||
// position pos_ of the input, if possible. If the position
|
||||
// is at or after the end of the input, return false. If there
|
||||
// are more code_units available, return true.
|
||||
virtual bool ReadBlock() = 0;
|
||||
virtual unsigned SlowSeekForward(unsigned code_unit_count) = 0;
|
||||
|
||||
const uc16* buffer_cursor_;
|
||||
const uc16* buffer_end_;
|
||||
unsigned pos_;
|
||||
};
|
||||
|
||||
|
||||
class UnicodeCache {
|
||||
// ---------------------------------------------------------------------
|
||||
// Caching predicates used by scanners.
|
||||
public:
|
||||
UnicodeCache() {}
|
||||
typedef unibrow::Utf8InputBuffer<1024> Utf8Decoder;
|
||||
|
||||
StaticResource<Utf8Decoder>* utf8_decoder() {
|
||||
return &utf8_decoder_;
|
||||
}
|
||||
|
||||
bool IsIdentifierStart(unibrow::uchar c) { return kIsIdentifierStart.get(c); }
|
||||
bool IsIdentifierPart(unibrow::uchar c) { return kIsIdentifierPart.get(c); }
|
||||
bool IsLineTerminator(unibrow::uchar c) { return kIsLineTerminator.get(c); }
|
||||
bool IsWhiteSpace(unibrow::uchar c) { return kIsWhiteSpace.get(c); }
|
||||
|
||||
private:
|
||||
unibrow::Predicate<IdentifierStart, 128> kIsIdentifierStart;
|
||||
unibrow::Predicate<IdentifierPart, 128> kIsIdentifierPart;
|
||||
unibrow::Predicate<unibrow::LineTerminator, 128> kIsLineTerminator;
|
||||
unibrow::Predicate<unibrow::WhiteSpace, 128> kIsWhiteSpace;
|
||||
StaticResource<Utf8Decoder> utf8_decoder_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(UnicodeCache);
|
||||
};
|
||||
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// LiteralBuffer - Collector of chars of literals.
|
||||
|
||||
class LiteralBuffer {
|
||||
public:
|
||||
LiteralBuffer() : is_ascii_(true), position_(0), backing_store_() { }
|
||||
|
||||
~LiteralBuffer() {
|
||||
if (backing_store_.length() > 0) {
|
||||
backing_store_.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
INLINE(void AddChar(uint32_t code_unit)) {
|
||||
if (position_ >= backing_store_.length()) ExpandBuffer();
|
||||
if (is_ascii_) {
|
||||
if (code_unit < kMaxAsciiCharCodeU) {
|
||||
backing_store_[position_] = static_cast<byte>(code_unit);
|
||||
position_ += kASCIISize;
|
||||
return;
|
||||
}
|
||||
ConvertToUtf16();
|
||||
}
|
||||
ASSERT(code_unit < 0x10000u);
|
||||
*reinterpret_cast<uc16*>(&backing_store_[position_]) = code_unit;
|
||||
position_ += kUC16Size;
|
||||
}
|
||||
|
||||
bool is_ascii() { return is_ascii_; }
|
||||
|
||||
Vector<const uc16> utf16_literal() {
|
||||
ASSERT(!is_ascii_);
|
||||
ASSERT((position_ & 0x1) == 0);
|
||||
return Vector<const uc16>(
|
||||
reinterpret_cast<const uc16*>(backing_store_.start()),
|
||||
position_ >> 1);
|
||||
}
|
||||
|
||||
Vector<const char> ascii_literal() {
|
||||
ASSERT(is_ascii_);
|
||||
return Vector<const char>(
|
||||
reinterpret_cast<const char*>(backing_store_.start()),
|
||||
position_);
|
||||
}
|
||||
|
||||
int length() {
|
||||
return is_ascii_ ? position_ : (position_ >> 1);
|
||||
}
|
||||
|
||||
void Reset() {
|
||||
position_ = 0;
|
||||
is_ascii_ = true;
|
||||
}
|
||||
|
||||
private:
|
||||
static const int kInitialCapacity = 16;
|
||||
static const int kGrowthFactory = 4;
|
||||
static const int kMinConversionSlack = 256;
|
||||
static const int kMaxGrowth = 1 * MB;
|
||||
inline int NewCapacity(int min_capacity) {
|
||||
int capacity = Max(min_capacity, backing_store_.length());
|
||||
int new_capacity = Min(capacity * kGrowthFactory, capacity + kMaxGrowth);
|
||||
return new_capacity;
|
||||
}
|
||||
|
||||
void ExpandBuffer() {
|
||||
Vector<byte> new_store = Vector<byte>::New(NewCapacity(kInitialCapacity));
|
||||
memcpy(new_store.start(), backing_store_.start(), position_);
|
||||
backing_store_.Dispose();
|
||||
backing_store_ = new_store;
|
||||
}
|
||||
|
||||
void ConvertToUtf16() {
|
||||
ASSERT(is_ascii_);
|
||||
Vector<byte> new_store;
|
||||
int new_content_size = position_ * kUC16Size;
|
||||
if (new_content_size >= backing_store_.length()) {
|
||||
// Ensure room for all currently read code units as UC16 as well
|
||||
// as the code unit about to be stored.
|
||||
new_store = Vector<byte>::New(NewCapacity(new_content_size));
|
||||
} else {
|
||||
new_store = backing_store_;
|
||||
}
|
||||
char* src = reinterpret_cast<char*>(backing_store_.start());
|
||||
uc16* dst = reinterpret_cast<uc16*>(new_store.start());
|
||||
for (int i = position_ - 1; i >= 0; i--) {
|
||||
dst[i] = src[i];
|
||||
}
|
||||
if (new_store.start() != backing_store_.start()) {
|
||||
backing_store_.Dispose();
|
||||
backing_store_ = new_store;
|
||||
}
|
||||
position_ = new_content_size;
|
||||
is_ascii_ = false;
|
||||
}
|
||||
|
||||
bool is_ascii_;
|
||||
int position_;
|
||||
Vector<byte> backing_store_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(LiteralBuffer);
|
||||
};
|
||||
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// JavaScript Scanner.
|
||||
|
||||
class Scanner {
|
||||
public:
|
||||
// Scoped helper for literal recording. Automatically drops the literal
|
||||
// if aborting the scanning before it's complete.
|
||||
class LiteralScope {
|
||||
public:
|
||||
explicit LiteralScope(Scanner* self)
|
||||
: scanner_(self), complete_(false) {
|
||||
scanner_->StartLiteral();
|
||||
}
|
||||
~LiteralScope() {
|
||||
if (!complete_) scanner_->DropLiteral();
|
||||
}
|
||||
void Complete() {
|
||||
scanner_->TerminateLiteral();
|
||||
complete_ = true;
|
||||
}
|
||||
|
||||
private:
|
||||
Scanner* scanner_;
|
||||
bool complete_;
|
||||
};
|
||||
|
||||
// Representation of an interval of source positions.
|
||||
struct Location {
|
||||
Location(int b, int e) : beg_pos(b), end_pos(e) { }
|
||||
Location() : beg_pos(0), end_pos(0) { }
|
||||
|
||||
bool IsValid() const {
|
||||
return beg_pos >= 0 && end_pos >= beg_pos;
|
||||
}
|
||||
|
||||
static Location invalid() { return Location(-1, -1); }
|
||||
|
||||
int beg_pos;
|
||||
int end_pos;
|
||||
};
|
||||
|
||||
// -1 is outside of the range of any real source code.
|
||||
static const int kNoOctalLocation = -1;
|
||||
|
||||
typedef unibrow::Utf8InputBuffer<1024> Utf8Decoder;
|
||||
|
||||
explicit Scanner(UnicodeCache* scanner_contants);
|
||||
|
||||
void Initialize(Utf16CharacterStream* source);
|
||||
|
||||
// Returns the next token and advances input.
|
||||
Token::Value Next();
|
||||
// Returns the current token again.
|
||||
Token::Value current_token() { return current_.token; }
|
||||
// Returns the location information for the current token
|
||||
// (the token last returned by Next()).
|
||||
Location location() const { return current_.location; }
|
||||
// Returns the literal string, if any, for the current token (the
|
||||
// token last returned by Next()). The string is 0-terminated.
|
||||
// Literal strings are collected for identifiers, strings, and
|
||||
// numbers.
|
||||
// These functions only give the correct result if the literal
|
||||
// was scanned between calls to StartLiteral() and TerminateLiteral().
|
||||
Vector<const char> literal_ascii_string() {
|
||||
ASSERT_NOT_NULL(current_.literal_chars);
|
||||
return current_.literal_chars->ascii_literal();
|
||||
}
|
||||
Vector<const uc16> literal_utf16_string() {
|
||||
ASSERT_NOT_NULL(current_.literal_chars);
|
||||
return current_.literal_chars->utf16_literal();
|
||||
}
|
||||
bool is_literal_ascii() {
|
||||
ASSERT_NOT_NULL(current_.literal_chars);
|
||||
return current_.literal_chars->is_ascii();
|
||||
}
|
||||
int literal_length() const {
|
||||
ASSERT_NOT_NULL(current_.literal_chars);
|
||||
return current_.literal_chars->length();
|
||||
}
|
||||
|
||||
bool literal_contains_escapes() const {
|
||||
Location location = current_.location;
|
||||
int source_length = (location.end_pos - location.beg_pos);
|
||||
if (current_.token == Token::STRING) {
|
||||
// Subtract delimiters.
|
||||
source_length -= 2;
|
||||
}
|
||||
return current_.literal_chars->length() != source_length;
|
||||
}
|
||||
|
||||
// Similar functions for the upcoming token.
|
||||
|
||||
// One token look-ahead (past the token returned by Next()).
|
||||
Token::Value peek() const { return next_.token; }
|
||||
|
||||
Location peek_location() const { return next_.location; }
|
||||
|
||||
// Returns the literal string for the next token (the token that
|
||||
// would be returned if Next() were called).
|
||||
Vector<const char> next_literal_ascii_string() {
|
||||
ASSERT_NOT_NULL(next_.literal_chars);
|
||||
return next_.literal_chars->ascii_literal();
|
||||
}
|
||||
Vector<const uc16> next_literal_utf16_string() {
|
||||
ASSERT_NOT_NULL(next_.literal_chars);
|
||||
return next_.literal_chars->utf16_literal();
|
||||
}
|
||||
bool is_next_literal_ascii() {
|
||||
ASSERT_NOT_NULL(next_.literal_chars);
|
||||
return next_.literal_chars->is_ascii();
|
||||
}
|
||||
int next_literal_length() const {
|
||||
ASSERT_NOT_NULL(next_.literal_chars);
|
||||
return next_.literal_chars->length();
|
||||
}
|
||||
|
||||
UnicodeCache* unicode_cache() { return unicode_cache_; }
|
||||
|
||||
static const int kCharacterLookaheadBufferSize = 1;
|
||||
|
||||
// Scans octal escape sequence. Also accepts "\0" decimal escape sequence.
|
||||
uc32 ScanOctalEscape(uc32 c, int length);
|
||||
|
||||
// Returns the location of the last seen octal literal.
|
||||
Location octal_position() const { return octal_pos_; }
|
||||
void clear_octal_position() { octal_pos_ = Location::invalid(); }
|
||||
|
||||
// Seek forward to the given position. This operation does not
|
||||
// work in general, for instance when there are pushed back
|
||||
// characters, but works for seeking forward until simple delimiter
|
||||
// tokens, which is what it is used for.
|
||||
void SeekForward(int pos);
|
||||
|
||||
bool HarmonyScoping() const {
|
||||
return harmony_scoping_;
|
||||
}
|
||||
void SetHarmonyScoping(bool scoping) {
|
||||
harmony_scoping_ = scoping;
|
||||
}
|
||||
bool HarmonyModules() const {
|
||||
return harmony_modules_;
|
||||
}
|
||||
void SetHarmonyModules(bool modules) {
|
||||
harmony_modules_ = modules;
|
||||
}
|
||||
|
||||
|
||||
// Returns true if there was a line terminator before the peek'ed token,
|
||||
// possibly inside a multi-line comment.
|
||||
bool HasAnyLineTerminatorBeforeNext() const {
|
||||
return has_line_terminator_before_next_ ||
|
||||
has_multiline_comment_before_next_;
|
||||
}
|
||||
|
||||
// Scans the input as a regular expression pattern, previous
|
||||
// character(s) must be /(=). Returns true if a pattern is scanned.
|
||||
bool ScanRegExpPattern(bool seen_equal);
|
||||
// Returns true if regexp flags are scanned (always since flags can
|
||||
// be empty).
|
||||
bool ScanRegExpFlags();
|
||||
|
||||
// Tells whether the buffer contains an identifier (no escapes).
|
||||
// Used for checking if a property name is an identifier.
|
||||
static bool IsIdentifier(unibrow::CharacterStream* buffer);
|
||||
|
||||
private:
|
||||
// The current and look-ahead token.
|
||||
struct TokenDesc {
|
||||
Token::Value token;
|
||||
Location location;
|
||||
LiteralBuffer* literal_chars;
|
||||
};
|
||||
|
||||
// Call this after setting source_ to the input.
|
||||
void Init() {
|
||||
// Set c0_ (one character ahead)
|
||||
STATIC_ASSERT(kCharacterLookaheadBufferSize == 1);
|
||||
Advance();
|
||||
// Initialize current_ to not refer to a literal.
|
||||
current_.literal_chars = NULL;
|
||||
}
|
||||
|
||||
// Literal buffer support
|
||||
inline void StartLiteral() {
|
||||
LiteralBuffer* free_buffer = (current_.literal_chars == &literal_buffer1_) ?
|
||||
&literal_buffer2_ : &literal_buffer1_;
|
||||
free_buffer->Reset();
|
||||
next_.literal_chars = free_buffer;
|
||||
}
|
||||
|
||||
INLINE(void AddLiteralChar(uc32 c)) {
|
||||
ASSERT_NOT_NULL(next_.literal_chars);
|
||||
next_.literal_chars->AddChar(c);
|
||||
}
|
||||
|
||||
// Complete scanning of a literal.
|
||||
inline void TerminateLiteral() {
|
||||
// Does nothing in the current implementation.
|
||||
}
|
||||
|
||||
// Stops scanning of a literal and drop the collected characters,
|
||||
// e.g., due to an encountered error.
|
||||
inline void DropLiteral() {
|
||||
next_.literal_chars = NULL;
|
||||
}
|
||||
|
||||
inline void AddLiteralCharAdvance() {
|
||||
AddLiteralChar(c0_);
|
||||
Advance();
|
||||
}
|
||||
|
||||
// Low-level scanning support.
|
||||
void Advance() { c0_ = source_->Advance(); }
|
||||
void PushBack(uc32 ch) {
|
||||
source_->PushBack(c0_);
|
||||
c0_ = ch;
|
||||
}
|
||||
|
||||
inline Token::Value Select(Token::Value tok) {
|
||||
Advance();
|
||||
return tok;
|
||||
}
|
||||
|
||||
inline Token::Value Select(uc32 next, Token::Value then, Token::Value else_) {
|
||||
Advance();
|
||||
if (c0_ == next) {
|
||||
Advance();
|
||||
return then;
|
||||
} else {
|
||||
return else_;
|
||||
}
|
||||
}
|
||||
|
||||
uc32 ScanHexNumber(int expected_length);
|
||||
|
||||
// Scans a single JavaScript token.
|
||||
void Scan();
|
||||
|
||||
bool SkipWhiteSpace();
|
||||
Token::Value SkipSingleLineComment();
|
||||
Token::Value SkipMultiLineComment();
|
||||
// Scans a possible HTML comment -- begins with '<!'.
|
||||
Token::Value ScanHtmlComment();
|
||||
|
||||
void ScanDecimalDigits();
|
||||
Token::Value ScanNumber(bool seen_period);
|
||||
Token::Value ScanIdentifierOrKeyword();
|
||||
Token::Value ScanIdentifierSuffix(LiteralScope* literal);
|
||||
|
||||
Token::Value ScanString();
|
||||
|
||||
// Scans an escape-sequence which is part of a string and adds the
|
||||
// decoded character to the current literal. Returns true if a pattern
|
||||
// is scanned.
|
||||
bool ScanEscape();
|
||||
// Decodes a Unicode escape-sequence which is part of an identifier.
|
||||
// If the escape sequence cannot be decoded the result is kBadChar.
|
||||
uc32 ScanIdentifierUnicodeEscape();
|
||||
// Scans a Unicode escape-sequence and adds its characters,
|
||||
// uninterpreted, to the current literal. Used for parsing RegExp
|
||||
// flags.
|
||||
bool ScanLiteralUnicodeEscape();
|
||||
|
||||
// Return the current source position.
|
||||
int source_pos() {
|
||||
return source_->pos() - kCharacterLookaheadBufferSize;
|
||||
}
|
||||
|
||||
UnicodeCache* unicode_cache_;
|
||||
|
||||
// Buffers collecting literal strings, numbers, etc.
|
||||
LiteralBuffer literal_buffer1_;
|
||||
LiteralBuffer literal_buffer2_;
|
||||
|
||||
TokenDesc current_; // desc for current token (as returned by Next())
|
||||
TokenDesc next_; // desc for next token (one token look-ahead)
|
||||
|
||||
// Input stream. Must be initialized to an Utf16CharacterStream.
|
||||
Utf16CharacterStream* source_;
|
||||
|
||||
|
||||
// Start position of the octal literal last scanned.
|
||||
Location octal_pos_;
|
||||
|
||||
// One Unicode character look-ahead; c0_ < 0 at the end of the input.
|
||||
uc32 c0_;
|
||||
|
||||
// Whether there is a line terminator whitespace character after
|
||||
// the current token, and before the next. Does not count newlines
|
||||
// inside multiline comments.
|
||||
bool has_line_terminator_before_next_;
|
||||
// Whether there is a multi-line comment that contains a
|
||||
// line-terminator after the current token, and before the next.
|
||||
bool has_multiline_comment_before_next_;
|
||||
// Whether we scan 'let' as a keyword for harmony block-scoped let bindings.
|
||||
bool harmony_scoping_;
|
||||
// Whether we scan 'module', 'import', 'export' as keywords.
|
||||
bool harmony_modules_;
|
||||
};
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
||||
#endif // V8_SCANNER_H_
|
||||
71
test/fixtures/cpp/utils.h
vendored
Normal file
71
test/fixtures/cpp/utils.h
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
/*
|
||||
This file is part of the PhantomJS project from Ofi Labs.
|
||||
|
||||
Copyright (C) 2011 Ariya Hidayat <ariya.hidayat@gmail.com>
|
||||
Copyright (C) 2011 Ivan De Marino <ivan.de.marino@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#ifndef UTILS_H
|
||||
#define UTILS_H
|
||||
|
||||
#include <QtGlobal>
|
||||
#include <QWebFrame>
|
||||
#include <QFile>
|
||||
|
||||
#include "csconverter.h"
|
||||
#include "encoding.h"
|
||||
|
||||
class QTemporaryFile;
|
||||
/**
|
||||
* Aggregate common utility functions.
|
||||
* Functions are static methods.
|
||||
* It's important to notice that, at the moment, this class can't be instantiated by design.
|
||||
*/
|
||||
class Utils
|
||||
{
|
||||
public:
|
||||
static void showUsage();
|
||||
static void messageHandler(QtMsgType type, const char *msg);
|
||||
static bool exceptionHandler(const char* dump_path, const char* minidump_id, void* context, bool succeeded);
|
||||
static QVariant coffee2js(const QString &script);
|
||||
static bool injectJsInFrame(const QString &jsFilePath, const QString &libraryPath, QWebFrame *targetFrame, const bool startingScript = false);
|
||||
static bool injectJsInFrame(const QString &jsFilePath, const Encoding &jsFileEnc, const QString &libraryPath, QWebFrame *targetFrame, const bool startingScript = false);
|
||||
static QString readResourceFileUtf8(const QString &resourceFilePath);
|
||||
|
||||
static bool loadJSForDebug(const QString &jsFilePath, const Encoding &jsFileEnc, const QString &libraryPath, QWebFrame *targetFrame, const bool autorun = false);
|
||||
static bool loadJSForDebug(const QString &jsFilePath, const QString &libraryPath, QWebFrame *targetFrame, const bool autorun = false);
|
||||
static void cleanupFromDebug();
|
||||
|
||||
private:
|
||||
static QString findScript(const QString &jsFilePath, const QString& libraryPath);
|
||||
static QString jsFromScriptFile(const QString& scriptPath, const Encoding& enc);
|
||||
Utils(); //< This class shouldn't be instantiated
|
||||
|
||||
static QTemporaryFile* m_tempHarness; //< We want to make sure to clean up after ourselves
|
||||
static QTemporaryFile* m_tempWrapper;
|
||||
};
|
||||
|
||||
#endif // UTILS_H
|
||||
288
test/fixtures/cpp/v8.cc
vendored
Normal file
288
test/fixtures/cpp/v8.cc
vendored
Normal file
@@ -0,0 +1,288 @@
|
||||
// Copyright 2012 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following
|
||||
// disclaimer in the documentation and/or other materials provided
|
||||
// with the distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived
|
||||
// from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#include "v8.h"
|
||||
|
||||
#include "assembler.h"
|
||||
#include "isolate.h"
|
||||
#include "elements.h"
|
||||
#include "bootstrapper.h"
|
||||
#include "debug.h"
|
||||
#include "deoptimizer.h"
|
||||
#include "frames.h"
|
||||
#include "heap-profiler.h"
|
||||
#include "hydrogen.h"
|
||||
#include "lithium-allocator.h"
|
||||
#include "log.h"
|
||||
#include "once.h"
|
||||
#include "platform.h"
|
||||
#include "runtime-profiler.h"
|
||||
#include "serialize.h"
|
||||
#include "store-buffer.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
V8_DECLARE_ONCE(init_once);
|
||||
|
||||
bool V8::is_running_ = false;
|
||||
bool V8::has_been_set_up_ = false;
|
||||
bool V8::has_been_disposed_ = false;
|
||||
bool V8::has_fatal_error_ = false;
|
||||
bool V8::use_crankshaft_ = true;
|
||||
List<CallCompletedCallback>* V8::call_completed_callbacks_ = NULL;
|
||||
|
||||
static LazyMutex entropy_mutex = LAZY_MUTEX_INITIALIZER;
|
||||
|
||||
static EntropySource entropy_source;
|
||||
|
||||
|
||||
bool V8::Initialize(Deserializer* des) {
|
||||
FlagList::EnforceFlagImplications();
|
||||
|
||||
InitializeOncePerProcess();
|
||||
|
||||
// The current thread may not yet had entered an isolate to run.
|
||||
// Note the Isolate::Current() may be non-null because for various
|
||||
// initialization purposes an initializing thread may be assigned an isolate
|
||||
// but not actually enter it.
|
||||
if (i::Isolate::CurrentPerIsolateThreadData() == NULL) {
|
||||
i::Isolate::EnterDefaultIsolate();
|
||||
}
|
||||
|
||||
ASSERT(i::Isolate::CurrentPerIsolateThreadData() != NULL);
|
||||
ASSERT(i::Isolate::CurrentPerIsolateThreadData()->thread_id().Equals(
|
||||
i::ThreadId::Current()));
|
||||
ASSERT(i::Isolate::CurrentPerIsolateThreadData()->isolate() ==
|
||||
i::Isolate::Current());
|
||||
|
||||
if (IsDead()) return false;
|
||||
|
||||
Isolate* isolate = Isolate::Current();
|
||||
if (isolate->IsInitialized()) return true;
|
||||
|
||||
is_running_ = true;
|
||||
has_been_set_up_ = true;
|
||||
has_fatal_error_ = false;
|
||||
has_been_disposed_ = false;
|
||||
|
||||
return isolate->Init(des);
|
||||
}
|
||||
|
||||
|
||||
void V8::SetFatalError() {
|
||||
is_running_ = false;
|
||||
has_fatal_error_ = true;
|
||||
}
|
||||
|
||||
|
||||
void V8::TearDown() {
|
||||
Isolate* isolate = Isolate::Current();
|
||||
ASSERT(isolate->IsDefaultIsolate());
|
||||
|
||||
if (!has_been_set_up_ || has_been_disposed_) return;
|
||||
|
||||
ElementsAccessor::TearDown();
|
||||
LOperand::TearDownCaches();
|
||||
RegisteredExtension::UnregisterAll();
|
||||
|
||||
isolate->TearDown();
|
||||
delete isolate;
|
||||
|
||||
is_running_ = false;
|
||||
has_been_disposed_ = true;
|
||||
|
||||
delete call_completed_callbacks_;
|
||||
call_completed_callbacks_ = NULL;
|
||||
|
||||
OS::TearDown();
|
||||
}
|
||||
|
||||
|
||||
static void seed_random(uint32_t* state) {
|
||||
for (int i = 0; i < 2; ++i) {
|
||||
if (FLAG_random_seed != 0) {
|
||||
state[i] = FLAG_random_seed;
|
||||
} else if (entropy_source != NULL) {
|
||||
uint32_t val;
|
||||
ScopedLock lock(entropy_mutex.Pointer());
|
||||
entropy_source(reinterpret_cast<unsigned char*>(&val), sizeof(uint32_t));
|
||||
state[i] = val;
|
||||
} else {
|
||||
state[i] = random();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Random number generator using George Marsaglia's MWC algorithm.
|
||||
static uint32_t random_base(uint32_t* state) {
|
||||
// Initialize seed using the system random().
|
||||
// No non-zero seed will ever become zero again.
|
||||
if (state[0] == 0) seed_random(state);
|
||||
|
||||
// Mix the bits. Never replaces state[i] with 0 if it is nonzero.
|
||||
state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16);
|
||||
state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16);
|
||||
|
||||
return (state[0] << 14) + (state[1] & 0x3FFFF);
|
||||
}
|
||||
|
||||
|
||||
void V8::SetEntropySource(EntropySource source) {
|
||||
entropy_source = source;
|
||||
}
|
||||
|
||||
|
||||
void V8::SetReturnAddressLocationResolver(
|
||||
ReturnAddressLocationResolver resolver) {
|
||||
StackFrame::SetReturnAddressLocationResolver(resolver);
|
||||
}
|
||||
|
||||
|
||||
// Used by JavaScript APIs
|
||||
uint32_t V8::Random(Context* context) {
|
||||
ASSERT(context->IsGlobalContext());
|
||||
ByteArray* seed = context->random_seed();
|
||||
return random_base(reinterpret_cast<uint32_t*>(seed->GetDataStartAddress()));
|
||||
}
|
||||
|
||||
|
||||
// Used internally by the JIT and memory allocator for security
|
||||
// purposes. So, we keep a different state to prevent informations
|
||||
// leaks that could be used in an exploit.
|
||||
uint32_t V8::RandomPrivate(Isolate* isolate) {
|
||||
ASSERT(isolate == Isolate::Current());
|
||||
return random_base(isolate->private_random_seed());
|
||||
}
|
||||
|
||||
|
||||
bool V8::IdleNotification(int hint) {
|
||||
// Returning true tells the caller that there is no need to call
|
||||
// IdleNotification again.
|
||||
if (!FLAG_use_idle_notification) return true;
|
||||
|
||||
// Tell the heap that it may want to adjust.
|
||||
return HEAP->IdleNotification(hint);
|
||||
}
|
||||
|
||||
|
||||
void V8::AddCallCompletedCallback(CallCompletedCallback callback) {
|
||||
if (call_completed_callbacks_ == NULL) { // Lazy init.
|
||||
call_completed_callbacks_ = new List<CallCompletedCallback>();
|
||||
}
|
||||
for (int i = 0; i < call_completed_callbacks_->length(); i++) {
|
||||
if (callback == call_completed_callbacks_->at(i)) return;
|
||||
}
|
||||
call_completed_callbacks_->Add(callback);
|
||||
}
|
||||
|
||||
|
||||
void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) {
|
||||
if (call_completed_callbacks_ == NULL) return;
|
||||
for (int i = 0; i < call_completed_callbacks_->length(); i++) {
|
||||
if (callback == call_completed_callbacks_->at(i)) {
|
||||
call_completed_callbacks_->Remove(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void V8::FireCallCompletedCallback(Isolate* isolate) {
|
||||
if (call_completed_callbacks_ == NULL) return;
|
||||
HandleScopeImplementer* handle_scope_implementer =
|
||||
isolate->handle_scope_implementer();
|
||||
if (!handle_scope_implementer->CallDepthIsZero()) return;
|
||||
// Fire callbacks. Increase call depth to prevent recursive callbacks.
|
||||
handle_scope_implementer->IncrementCallDepth();
|
||||
for (int i = 0; i < call_completed_callbacks_->length(); i++) {
|
||||
call_completed_callbacks_->at(i)();
|
||||
}
|
||||
handle_scope_implementer->DecrementCallDepth();
|
||||
}
|
||||
|
||||
|
||||
// Use a union type to avoid type-aliasing optimizations in GCC.
|
||||
typedef union {
|
||||
double double_value;
|
||||
uint64_t uint64_t_value;
|
||||
} double_int_union;
|
||||
|
||||
|
||||
Object* V8::FillHeapNumberWithRandom(Object* heap_number,
|
||||
Context* context) {
|
||||
double_int_union r;
|
||||
uint64_t random_bits = Random(context);
|
||||
// Convert 32 random bits to 0.(32 random bits) in a double
|
||||
// by computing:
|
||||
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
|
||||
static const double binary_million = 1048576.0;
|
||||
r.double_value = binary_million;
|
||||
r.uint64_t_value |= random_bits;
|
||||
r.double_value -= binary_million;
|
||||
|
||||
HeapNumber::cast(heap_number)->set_value(r.double_value);
|
||||
return heap_number;
|
||||
}
|
||||
|
||||
void V8::InitializeOncePerProcessImpl() {
|
||||
OS::SetUp();
|
||||
|
||||
use_crankshaft_ = FLAG_crankshaft;
|
||||
|
||||
if (Serializer::enabled()) {
|
||||
use_crankshaft_ = false;
|
||||
}
|
||||
|
||||
CPU::SetUp();
|
||||
if (!CPU::SupportsCrankshaft()) {
|
||||
use_crankshaft_ = false;
|
||||
}
|
||||
|
||||
OS::PostSetUp();
|
||||
|
||||
RuntimeProfiler::GlobalSetUp();
|
||||
|
||||
ElementsAccessor::InitializeOncePerProcess();
|
||||
|
||||
if (FLAG_stress_compaction) {
|
||||
FLAG_force_marking_deque_overflows = true;
|
||||
FLAG_gc_global = true;
|
||||
FLAG_max_new_space_size = (1 << (kPageSizeBits - 10)) * 2;
|
||||
}
|
||||
|
||||
LOperand::SetUpCaches();
|
||||
SetUpJSCallerSavedCodeData();
|
||||
SamplerRegistry::SetUp();
|
||||
ExternalReference::SetUp();
|
||||
}
|
||||
|
||||
void V8::InitializeOncePerProcess() {
|
||||
CallOnce(&init_once, &InitializeOncePerProcessImpl);
|
||||
}
|
||||
|
||||
} } // namespace v8::internal
|
||||
152
test/fixtures/cpp/v8.h
vendored
Normal file
152
test/fixtures/cpp/v8.h
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following
|
||||
// disclaimer in the documentation and/or other materials provided
|
||||
// with the distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived
|
||||
// from this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
//
|
||||
// Top include for all V8 .cc files.
|
||||
//
|
||||
|
||||
#ifndef V8_V8_H_
|
||||
#define V8_V8_H_
|
||||
|
||||
#if defined(GOOGLE3)
|
||||
// Google3 special flag handling.
|
||||
#if defined(DEBUG) && defined(NDEBUG)
|
||||
// V8 only uses DEBUG and whenever it is set we are building a debug
|
||||
// version of V8. We do not use NDEBUG and simply undef it here for
|
||||
// consistency.
|
||||
#undef NDEBUG
|
||||
#endif
|
||||
#endif // defined(GOOGLE3)
|
||||
|
||||
// V8 only uses DEBUG, but included external files
|
||||
// may use NDEBUG - make sure they are consistent.
|
||||
#if defined(DEBUG) && defined(NDEBUG)
|
||||
#error both DEBUG and NDEBUG are set
|
||||
#endif
|
||||
|
||||
// Basic includes
|
||||
#include "../include/v8.h"
|
||||
#include "v8globals.h"
|
||||
#include "v8checks.h"
|
||||
#include "allocation.h"
|
||||
#include "v8utils.h"
|
||||
#include "flags.h"
|
||||
|
||||
// Objects & heap
|
||||
#include "objects-inl.h"
|
||||
#include "spaces-inl.h"
|
||||
#include "heap-inl.h"
|
||||
#include "incremental-marking-inl.h"
|
||||
#include "mark-compact-inl.h"
|
||||
#include "log-inl.h"
|
||||
#include "cpu-profiler-inl.h"
|
||||
#include "handles-inl.h"
|
||||
#include "zone-inl.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
class Deserializer;
|
||||
|
||||
class V8 : public AllStatic {
|
||||
public:
|
||||
// Global actions.
|
||||
|
||||
// If Initialize is called with des == NULL, the initial state is
|
||||
// created from scratch. If a non-null Deserializer is given, the
|
||||
// initial state is created by reading the deserialized data into an
|
||||
// empty heap.
|
||||
static bool Initialize(Deserializer* des);
|
||||
static void TearDown();
|
||||
static bool IsRunning() { return is_running_; }
|
||||
static bool UseCrankshaft() { return use_crankshaft_; }
|
||||
// To be dead you have to have lived
|
||||
// TODO(isolates): move IsDead to Isolate.
|
||||
static bool IsDead() { return has_fatal_error_ || has_been_disposed_; }
|
||||
static void SetFatalError();
|
||||
|
||||
// Report process out of memory. Implementation found in api.cc.
|
||||
static void FatalProcessOutOfMemory(const char* location,
|
||||
bool take_snapshot = false);
|
||||
|
||||
// Allows an entropy source to be provided for use in random number
|
||||
// generation.
|
||||
static void SetEntropySource(EntropySource source);
|
||||
// Support for return-address rewriting profilers.
|
||||
static void SetReturnAddressLocationResolver(
|
||||
ReturnAddressLocationResolver resolver);
|
||||
// Random number generation support. Not cryptographically safe.
|
||||
static uint32_t Random(Context* context);
|
||||
// We use random numbers internally in memory allocation and in the
|
||||
// compilers for security. In order to prevent information leaks we
|
||||
// use a separate random state for internal random number
|
||||
// generation.
|
||||
static uint32_t RandomPrivate(Isolate* isolate);
|
||||
static Object* FillHeapNumberWithRandom(Object* heap_number,
|
||||
Context* context);
|
||||
|
||||
// Idle notification directly from the API.
|
||||
static bool IdleNotification(int hint);
|
||||
|
||||
static void AddCallCompletedCallback(CallCompletedCallback callback);
|
||||
static void RemoveCallCompletedCallback(CallCompletedCallback callback);
|
||||
static void FireCallCompletedCallback(Isolate* isolate);
|
||||
|
||||
private:
|
||||
static void InitializeOncePerProcessImpl();
|
||||
static void InitializeOncePerProcess();
|
||||
|
||||
// True if engine is currently running
|
||||
static bool is_running_;
|
||||
// True if V8 has ever been run
|
||||
static bool has_been_set_up_;
|
||||
// True if error has been signaled for current engine
|
||||
// (reset to false if engine is restarted)
|
||||
static bool has_fatal_error_;
|
||||
// True if engine has been shut down
|
||||
// (reset if engine is restarted)
|
||||
static bool has_been_disposed_;
|
||||
// True if we are using the crankshaft optimizing compiler.
|
||||
static bool use_crankshaft_;
|
||||
// List of callbacks when a Call completes.
|
||||
static List<CallCompletedCallback>* call_completed_callbacks_;
|
||||
};
|
||||
|
||||
|
||||
// JavaScript defines two kinds of 'nil'.
|
||||
enum NilValue { kNullValue, kUndefinedValue };
|
||||
|
||||
|
||||
// JavaScript defines two kinds of equality.
|
||||
enum EqualityKind { kStrictEquality, kNonStrictEquality };
|
||||
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
||||
namespace i = v8::internal;
|
||||
|
||||
#endif // V8_V8_H_
|
||||
243
test/fixtures/java/HtmlDomParserContext.java
vendored
Normal file
243
test/fixtures/java/HtmlDomParserContext.java
vendored
Normal file
@@ -0,0 +1,243 @@
|
||||
/**
|
||||
* (The MIT License)
|
||||
*
|
||||
* Copyright (c) 2008 - 2012:
|
||||
*
|
||||
* * {Aaron Patterson}[http://tenderlovemaking.com]
|
||||
* * {Mike Dalessio}[http://mike.daless.io]
|
||||
* * {Charles Nutter}[http://blog.headius.com]
|
||||
* * {Sergio Arbeo}[http://www.serabe.com]
|
||||
* * {Patrick Mahoney}[http://polycrystal.org]
|
||||
* * {Yoko Harada}[http://yokolet.blogspot.com]
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* 'Software'), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
package nokogiri.internals;
|
||||
|
||||
import static nokogiri.internals.NokogiriHelpers.getNokogiriClass;
|
||||
import static nokogiri.internals.NokogiriHelpers.isNamespace;
|
||||
import static nokogiri.internals.NokogiriHelpers.stringOrNil;
|
||||
import nokogiri.HtmlDocument;
|
||||
import nokogiri.NokogiriService;
|
||||
import nokogiri.XmlDocument;
|
||||
|
||||
import org.apache.xerces.parsers.DOMParser;
|
||||
import org.apache.xerces.xni.Augmentations;
|
||||
import org.apache.xerces.xni.QName;
|
||||
import org.apache.xerces.xni.XMLAttributes;
|
||||
import org.apache.xerces.xni.XNIException;
|
||||
import org.apache.xerces.xni.parser.XMLDocumentFilter;
|
||||
import org.apache.xerces.xni.parser.XMLParserConfiguration;
|
||||
import org.cyberneko.html.HTMLConfiguration;
|
||||
import org.cyberneko.html.filters.DefaultFilter;
|
||||
import org.jruby.Ruby;
|
||||
import org.jruby.RubyClass;
|
||||
import org.jruby.runtime.ThreadContext;
|
||||
import org.jruby.runtime.builtin.IRubyObject;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.NamedNodeMap;
|
||||
import org.w3c.dom.NodeList;
|
||||
|
||||
/**
|
||||
* Parser for HtmlDocument. This class actually parses HtmlDocument using NekoHtml.
|
||||
*
|
||||
* @author sergio
|
||||
* @author Patrick Mahoney <pat@polycrystal.org>
|
||||
* @author Yoko Harada <yokolet@gmail.com>
|
||||
*/
|
||||
public class HtmlDomParserContext extends XmlDomParserContext {
|
||||
|
||||
public HtmlDomParserContext(Ruby runtime, IRubyObject options) {
|
||||
super(runtime, options);
|
||||
}
|
||||
|
||||
public HtmlDomParserContext(Ruby runtime, IRubyObject encoding, IRubyObject options) {
|
||||
super(runtime, encoding, options);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initErrorHandler() {
|
||||
if (options.strict) {
|
||||
errorHandler = new NokogiriStrictErrorHandler(options.noError, options.noWarning);
|
||||
} else {
|
||||
errorHandler = new NokogiriNonStrictErrorHandler4NekoHtml(options.noError, options.noWarning);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initParser(Ruby runtime) {
|
||||
XMLParserConfiguration config = new HTMLConfiguration();
|
||||
XMLDocumentFilter removeNSAttrsFilter = new RemoveNSAttrsFilter();
|
||||
XMLDocumentFilter elementValidityCheckFilter = new ElementValidityCheckFilter(errorHandler);
|
||||
//XMLDocumentFilter[] filters = { removeNSAttrsFilter, elementValidityCheckFilter};
|
||||
XMLDocumentFilter[] filters = { elementValidityCheckFilter};
|
||||
|
||||
config.setErrorHandler(this.errorHandler);
|
||||
parser = new DOMParser(config);
|
||||
|
||||
// see http://nekohtml.sourceforge.net/settings.html for details
|
||||
setProperty("http://cyberneko.org/html/properties/default-encoding", java_encoding);
|
||||
setProperty("http://cyberneko.org/html/properties/names/elems", "lower");
|
||||
setProperty("http://cyberneko.org/html/properties/names/attrs", "lower");
|
||||
setProperty("http://cyberneko.org/html/properties/filters", filters);
|
||||
setFeature("http://cyberneko.org/html/features/report-errors", true);
|
||||
setFeature("http://xml.org/sax/features/namespaces", false);
|
||||
setFeature("http://cyberneko.org/html/features/insert-doctype", true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable NekoHTML feature for balancing tags in a document fragment.
|
||||
*
|
||||
* This method is used in XmlNode#in_context method.
|
||||
*/
|
||||
public void enableDocumentFragment() {
|
||||
setFeature("http://cyberneko.org/html/features/balance-tags/document-fragment", true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XmlDocument getNewEmptyDocument(ThreadContext context) {
|
||||
IRubyObject[] args = new IRubyObject[0];
|
||||
return (XmlDocument) XmlDocument.rbNew(context, getNokogiriClass(context.getRuntime(), "Nokogiri::HTML::Document"), args);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XmlDocument wrapDocument(ThreadContext context,
|
||||
RubyClass klazz,
|
||||
Document document) {
|
||||
HtmlDocument htmlDocument = (HtmlDocument) NokogiriService.HTML_DOCUMENT_ALLOCATOR.allocate(context.getRuntime(), klazz);
|
||||
htmlDocument.setDocumentNode(context, document);
|
||||
if (ruby_encoding.isNil()) {
|
||||
// ruby_encoding might have detected by HtmlDocument::EncodingReader
|
||||
if (detected_encoding != null && !detected_encoding.isNil()) {
|
||||
ruby_encoding = detected_encoding;
|
||||
} else {
|
||||
// no encoding given & no encoding detected, then try to get it
|
||||
String charset = tryGetCharsetFromHtml5MetaTag(document);
|
||||
ruby_encoding = stringOrNil(context.getRuntime(), charset);
|
||||
}
|
||||
}
|
||||
htmlDocument.setEncoding(ruby_encoding);
|
||||
htmlDocument.setParsedEncoding(java_encoding);
|
||||
return htmlDocument;
|
||||
}
|
||||
|
||||
// NekoHtml doesn't understand HTML5 meta tag format. This fails to detect charset
|
||||
// from an HTML5 style meta tag. Luckily, the meta tag and charset exists in DOM tree
|
||||
// so, this method attempts to find the charset.
|
||||
private String tryGetCharsetFromHtml5MetaTag(Document document) {
|
||||
if (!"html".equalsIgnoreCase(document.getDocumentElement().getNodeName())) return null;
|
||||
NodeList list = document.getDocumentElement().getChildNodes();
|
||||
for (int i = 0; i < list.getLength(); i++) {
|
||||
if ("head".equalsIgnoreCase(list.item(i).getNodeName())) {
|
||||
NodeList headers = list.item(i).getChildNodes();
|
||||
for (int j = 0; j < headers.getLength(); j++) {
|
||||
if ("meta".equalsIgnoreCase(headers.item(j).getNodeName())) {
|
||||
NamedNodeMap nodeMap = headers.item(j).getAttributes();
|
||||
for (int k = 0; k < nodeMap.getLength(); k++) {
|
||||
if ("charset".equalsIgnoreCase(nodeMap.item(k).getNodeName())) {
|
||||
return nodeMap.item(k).getNodeValue();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter to strip out attributes that pertain to XML namespaces.
|
||||
*/
|
||||
public static class RemoveNSAttrsFilter extends DefaultFilter {
|
||||
@Override
|
||||
public void startElement(QName element, XMLAttributes attrs,
|
||||
Augmentations augs) throws XNIException {
|
||||
int i;
|
||||
for (i = 0; i < attrs.getLength(); ++i) {
|
||||
if (isNamespace(attrs.getQName(i))) {
|
||||
attrs.removeAttributeAt(i);
|
||||
--i;
|
||||
}
|
||||
}
|
||||
|
||||
element.uri = null;
|
||||
super.startElement(element, attrs, augs);
|
||||
}
|
||||
}
|
||||
|
||||
public static class ElementValidityCheckFilter extends DefaultFilter {
|
||||
private NokogiriErrorHandler errorHandler;
|
||||
|
||||
private ElementValidityCheckFilter(NokogiriErrorHandler errorHandler) {
|
||||
this.errorHandler = errorHandler;
|
||||
}
|
||||
|
||||
// element names from xhtml1-strict.dtd
|
||||
private static String[][] element_names = {
|
||||
{"a", "abbr", "acronym", "address", "area"},
|
||||
{"b", "base", "basefont", "bdo", "big", "blockquote", "body", "br", "button"},
|
||||
{"caption", "cite", "code", "col", "colgroup"},
|
||||
{"dd", "del", "dfn", "div", "dl", "dt"},
|
||||
{"em"},
|
||||
{"fieldset", "font", "form", "frame", "frameset"},
|
||||
{}, // g
|
||||
{"h1", "h2", "h3", "h4", "h5", "h6", "head", "hr", "html"},
|
||||
{"i", "iframe", "img", "input", "ins"},
|
||||
{}, // j
|
||||
{"kbd"},
|
||||
{"label", "legend", "li", "link"},
|
||||
{"map", "meta"},
|
||||
{"noframes", "noscript"},
|
||||
{"object", "ol", "optgroup", "option"},
|
||||
{"p", "param", "pre"},
|
||||
{"q"},
|
||||
{}, // r
|
||||
{"s", "samp", "script", "select", "small", "span", "strike", "strong", "style", "sub", "sup"},
|
||||
{"table", "tbody", "td", "textarea", "tfoot", "th", "thead", "title", "tr", "tt"},
|
||||
{"u", "ul"},
|
||||
{"var"},
|
||||
{}, // w
|
||||
{}, // x
|
||||
{}, // y
|
||||
{} // z
|
||||
};
|
||||
|
||||
private boolean isValid(String testee) {
|
||||
char[] c = testee.toCharArray();
|
||||
int index = new Integer(c[0]) - 97;
|
||||
if (index > 25) return false;
|
||||
for (int i=0; i<element_names[index].length; i++) {
|
||||
if (testee.equals(element_names[index][i])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startElement(QName name, XMLAttributes attrs, Augmentations augs) throws XNIException {
|
||||
if (!isValid(name.rawname)) {
|
||||
errorHandler.getErrors().add(new Exception("Tag " + name.rawname + " invalid"));
|
||||
}
|
||||
super.startElement(name, attrs, augs);
|
||||
}
|
||||
}
|
||||
}
|
||||
322
test/fixtures/java/Hudson.java
vendored
Normal file
322
test/fixtures/java/Hudson.java
vendored
Normal file
@@ -0,0 +1,322 @@
|
||||
/*
|
||||
* The MIT License
|
||||
*
|
||||
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi,
|
||||
* Erik Ramfelt, Koichi Fujikawa, Red Hat, Inc., Seiji Sogabe,
|
||||
* Stephen Connolly, Tom Huybrechts, Yahoo! Inc., Alan Harder, CloudBees, Inc.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
package hudson.model;
|
||||
|
||||
import hudson.ExtensionListView;
|
||||
import hudson.Functions;
|
||||
import hudson.Platform;
|
||||
import hudson.PluginManager;
|
||||
import hudson.cli.declarative.CLIResolver;
|
||||
import hudson.model.listeners.ItemListener;
|
||||
import hudson.slaves.ComputerListener;
|
||||
import hudson.util.CopyOnWriteList;
|
||||
import hudson.util.FormValidation;
|
||||
import jenkins.model.Jenkins;
|
||||
import org.jvnet.hudson.reactor.ReactorException;
|
||||
import org.kohsuke.stapler.QueryParameter;
|
||||
import org.kohsuke.stapler.Stapler;
|
||||
import org.kohsuke.stapler.StaplerRequest;
|
||||
import org.kohsuke.stapler.StaplerResponse;
|
||||
|
||||
import javax.servlet.ServletContext;
|
||||
import javax.servlet.ServletException;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.text.NumberFormat;
|
||||
import java.text.ParseException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static hudson.Util.fixEmpty;
|
||||
|
||||
public class Hudson extends Jenkins {
|
||||
|
||||
/**
|
||||
* List of registered {@link hudson.model.listeners.ItemListener}s.
|
||||
* @deprecated as of 1.286
|
||||
*/
|
||||
private transient final CopyOnWriteList<ItemListener> itemListeners = ExtensionListView.createCopyOnWriteList(ItemListener.class);
|
||||
|
||||
/**
|
||||
* List of registered {@link hudson.slaves.ComputerListener}s.
|
||||
* @deprecated as of 1.286
|
||||
*/
|
||||
private transient final CopyOnWriteList<ComputerListener> computerListeners = ExtensionListView.createCopyOnWriteList(ComputerListener.class);
|
||||
|
||||
|
||||
@CLIResolver
|
||||
public static Hudson getInstance() {
|
||||
return (Hudson)Jenkins.getInstance();
|
||||
}
|
||||
|
||||
public Hudson(File root, ServletContext context) throws IOException, InterruptedException, ReactorException {
|
||||
this(root,context,null);
|
||||
}
|
||||
|
||||
public Hudson(File root, ServletContext context, PluginManager pluginManager) throws IOException, InterruptedException, ReactorException {
|
||||
super(root, context, pluginManager);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all the installed {@link ItemListener}s.
|
||||
*
|
||||
* @deprecated as of 1.286.
|
||||
* Use {@link ItemListener#all()}.
|
||||
*/
|
||||
public CopyOnWriteList<ItemListener> getJobListeners() {
|
||||
return itemListeners;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all the installed {@link ComputerListener}s.
|
||||
*
|
||||
* @deprecated as of 1.286.
|
||||
* Use {@link ComputerListener#all()}.
|
||||
*/
|
||||
public CopyOnWriteList<ComputerListener> getComputerListeners() {
|
||||
return computerListeners;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the slave node of the give name, hooked under this Hudson.
|
||||
*
|
||||
* @deprecated
|
||||
* Use {@link #getNode(String)}. Since 1.252.
|
||||
*/
|
||||
public Slave getSlave(String name) {
|
||||
Node n = getNode(name);
|
||||
if (n instanceof Slave)
|
||||
return (Slave)n;
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Use {@link #getNodes()}. Since 1.252.
|
||||
*/
|
||||
public List<Slave> getSlaves() {
|
||||
return (List)slaves;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the slave list.
|
||||
*
|
||||
* @deprecated
|
||||
* Use {@link #setNodes(List)}. Since 1.252.
|
||||
*/
|
||||
public void setSlaves(List<Slave> slaves) throws IOException {
|
||||
setNodes(slaves);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Left only for the compatibility of URLs.
|
||||
* Should not be invoked for any other purpose.
|
||||
*/
|
||||
public TopLevelItem getJob(String name) {
|
||||
return getItem(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Used only for mapping jobs to URL in a case-insensitive fashion.
|
||||
*/
|
||||
public TopLevelItem getJobCaseInsensitive(String name) {
|
||||
String match = Functions.toEmailSafeString(name);
|
||||
for(TopLevelItem item : getItems()) {
|
||||
if(Functions.toEmailSafeString(item.getName()).equalsIgnoreCase(match)) {
|
||||
return item;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated as of 1.317
|
||||
* Use {@link #doQuietDown()} instead.
|
||||
*/
|
||||
public synchronized void doQuietDown(StaplerResponse rsp) throws IOException, ServletException {
|
||||
doQuietDown().generateResponse(null, rsp, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* RSS feed for log entries.
|
||||
*
|
||||
* @deprecated
|
||||
* As on 1.267, moved to "/log/rss..."
|
||||
*/
|
||||
public void doLogRss( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
|
||||
String qs = req.getQueryString();
|
||||
rsp.sendRedirect2("./log/rss"+(qs==null?"":'?'+qs));
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated as of 1.294
|
||||
* Define your own check method, instead of relying on this generic one.
|
||||
*/
|
||||
public void doFieldCheck(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
|
||||
doFieldCheck(
|
||||
fixEmpty(req.getParameter("value")),
|
||||
fixEmpty(req.getParameter("type")),
|
||||
fixEmpty(req.getParameter("errorText")),
|
||||
fixEmpty(req.getParameter("warningText"))).generateResponse(req,rsp,this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the value for a field is set; if not an error or warning text is displayed.
|
||||
* If the parameter "value" is not set then the parameter "errorText" is displayed
|
||||
* as an error text. If the parameter "errorText" is not set, then the parameter "warningText"
|
||||
* is displayed as a warning text.
|
||||
* <p>
|
||||
* If the text is set and the parameter "type" is set, it will validate that the value is of the
|
||||
* correct type. Supported types are "number, "number-positive" and "number-negative".
|
||||
*
|
||||
* @deprecated as of 1.324
|
||||
* Either use client-side validation (e.g. class="required number")
|
||||
* or define your own check method, instead of relying on this generic one.
|
||||
*/
|
||||
public FormValidation doFieldCheck(@QueryParameter(fixEmpty=true) String value,
|
||||
@QueryParameter(fixEmpty=true) String type,
|
||||
@QueryParameter(fixEmpty=true) String errorText,
|
||||
@QueryParameter(fixEmpty=true) String warningText) {
|
||||
if (value == null) {
|
||||
if (errorText != null)
|
||||
return FormValidation.error(errorText);
|
||||
if (warningText != null)
|
||||
return FormValidation.warning(warningText);
|
||||
return FormValidation.error("No error or warning text was set for fieldCheck().");
|
||||
}
|
||||
|
||||
if (type != null) {
|
||||
try {
|
||||
if (type.equalsIgnoreCase("number")) {
|
||||
NumberFormat.getInstance().parse(value);
|
||||
} else if (type.equalsIgnoreCase("number-positive")) {
|
||||
if (NumberFormat.getInstance().parse(value).floatValue() <= 0)
|
||||
return FormValidation.error(Messages.Hudson_NotAPositiveNumber());
|
||||
} else if (type.equalsIgnoreCase("number-negative")) {
|
||||
if (NumberFormat.getInstance().parse(value).floatValue() >= 0)
|
||||
return FormValidation.error(Messages.Hudson_NotANegativeNumber());
|
||||
}
|
||||
} catch (ParseException e) {
|
||||
return FormValidation.error(Messages.Hudson_NotANumber());
|
||||
}
|
||||
}
|
||||
|
||||
return FormValidation.ok();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Use {@link Functions#isWindows()}.
|
||||
*/
|
||||
public static boolean isWindows() {
|
||||
return File.pathSeparatorChar==';';
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Use {@link hudson.Platform#isDarwin()}
|
||||
*/
|
||||
public static boolean isDarwin() {
|
||||
return Platform.isDarwin();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated since 2007-12-18.
|
||||
* Use {@link #checkPermission(hudson.security.Permission)}
|
||||
*/
|
||||
public static boolean adminCheck() throws IOException {
|
||||
return adminCheck(Stapler.getCurrentRequest(), Stapler.getCurrentResponse());
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated since 2007-12-18.
|
||||
* Use {@link #checkPermission(hudson.security.Permission)}
|
||||
*/
|
||||
public static boolean adminCheck(StaplerRequest req,StaplerResponse rsp) throws IOException {
|
||||
if (isAdmin(req)) return true;
|
||||
|
||||
rsp.sendError(StaplerResponse.SC_FORBIDDEN);
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the current user (for which we are processing the current request)
|
||||
* has the admin access.
|
||||
*
|
||||
* @deprecated since 2007-12-18.
|
||||
* This method is deprecated when Hudson moved from simple Unix root-like model
|
||||
* of "admin gets to do everything, and others don't have any privilege" to more
|
||||
* complex {@link hudson.security.ACL} and {@link hudson.security.Permission} based scheme.
|
||||
*
|
||||
* <p>
|
||||
* For a quick migration, use {@code Hudson.getInstance().getACL().hasPermission(Hudson.ADMINISTER)}
|
||||
* To check if the user has the 'administer' role in Hudson.
|
||||
*
|
||||
* <p>
|
||||
* But ideally, your plugin should first identify a suitable {@link hudson.security.Permission} (or create one,
|
||||
* if appropriate), then identify a suitable {@link hudson.security.AccessControlled} object to check its permission
|
||||
* against.
|
||||
*/
|
||||
public static boolean isAdmin() {
|
||||
return Jenkins.getInstance().getACL().hasPermission(ADMINISTER);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated since 2007-12-18.
|
||||
* Define a custom {@link hudson.security.Permission} and check against ACL.
|
||||
* See {@link #isAdmin()} for more instructions.
|
||||
*/
|
||||
public static boolean isAdmin(StaplerRequest req) {
|
||||
return isAdmin();
|
||||
}
|
||||
|
||||
static {
|
||||
XSTREAM.alias("hudson",Hudson.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated only here for backward comp
|
||||
*/
|
||||
public static final class MasterComputer extends Jenkins.MasterComputer {
|
||||
// no op
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated only here for backward comp
|
||||
*/
|
||||
public static class CloudList extends Jenkins.CloudList {
|
||||
public CloudList(Jenkins h) {
|
||||
super(h);
|
||||
}
|
||||
|
||||
public CloudList() {// needed for XStream deserialization
|
||||
super();
|
||||
}
|
||||
}
|
||||
}
|
||||
598
test/fixtures/java/NokogiriService.java
vendored
Normal file
598
test/fixtures/java/NokogiriService.java
vendored
Normal file
@@ -0,0 +1,598 @@
|
||||
/**
|
||||
* (The MIT License)
|
||||
*
|
||||
* Copyright (c) 2008 - 2011:
|
||||
*
|
||||
* * {Aaron Patterson}[http://tenderlovemaking.com]
|
||||
* * {Mike Dalessio}[http://mike.daless.io]
|
||||
* * {Charles Nutter}[http://blog.headius.com]
|
||||
* * {Sergio Arbeo}[http://www.serabe.com]
|
||||
* * {Patrick Mahoney}[http://polycrystal.org]
|
||||
* * {Yoko Harada}[http://yokolet.blogspot.com]
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* 'Software'), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
package nokogiri;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.jruby.Ruby;
|
||||
import org.jruby.RubyArray;
|
||||
import org.jruby.RubyClass;
|
||||
import org.jruby.RubyFixnum;
|
||||
import org.jruby.RubyModule;
|
||||
import org.jruby.runtime.ObjectAllocator;
|
||||
import org.jruby.runtime.builtin.IRubyObject;
|
||||
import org.jruby.runtime.load.BasicLibraryService;
|
||||
|
||||
/**
|
||||
* Class to provide Nokogiri. This class is used to make "require 'nokogiri'" work
|
||||
* in JRuby. Also, this class holds a Ruby type cache and allocators of Ruby types.
|
||||
*
|
||||
* @author headius
|
||||
* @author Yoko Harada <yokolet@gmail.com>
|
||||
*/
|
||||
public class NokogiriService implements BasicLibraryService {
|
||||
public static final String nokogiriClassCacheGvarName = "$NOKOGIRI_CLASS_CACHE";
|
||||
public static Map<String, RubyClass> nokogiriClassCache;
|
||||
|
||||
public boolean basicLoad(Ruby ruby) {
|
||||
init(ruby);
|
||||
createNokogiriClassCahce(ruby);
|
||||
return true;
|
||||
}
|
||||
|
||||
private static void createNokogiriClassCahce(Ruby ruby) {
|
||||
nokogiriClassCache = Collections.synchronizedMap(new HashMap<String, RubyClass>());
|
||||
nokogiriClassCache.put("Nokogiri::EncodingHandler", (RubyClass)ruby.getClassFromPath("Nokogiri::EncodingHandler"));
|
||||
nokogiriClassCache.put("Nokogiri::HTML::Document", (RubyClass)ruby.getClassFromPath("Nokogiri::HTML::Document"));
|
||||
nokogiriClassCache.put("Nokogiri::HTML::ElementDescription", (RubyClass)ruby.getClassFromPath("Nokogiri::HTML::ElementDescription"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Attr", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Attr"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Document", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Document"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::DocumentFragment", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::DocumentFragment"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::DTD", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::DTD"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Text", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Text"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Comment", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Comment"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Element", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Element"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::ElementContent", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::ElementContent"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::ElementDecl", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::ElementDecl"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::EntityDecl", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::EntityDecl"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::EntityReference", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::EntityReference"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::ProcessingInstruction", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::ProcessingInstruction"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::CDATA", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::CDATA"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Node", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Node"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::NodeSet", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::NodeSet"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Namespace", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Namespace"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::SyntaxError", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::SyntaxError"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Reader", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Reader"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::RelaxNG", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::RelaxNG"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::Schema", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::Schema"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::XPathContext", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::XPathContext"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::AttributeDecl", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::AttributeDecl"));
|
||||
nokogiriClassCache.put("Nokogiri::XML::SAX::ParserContext", (RubyClass)ruby.getClassFromPath("Nokogiri::XML::SAX::ParserContext"));
|
||||
}
|
||||
|
||||
private void init(Ruby ruby) {
|
||||
RubyModule nokogiri = ruby.defineModule("Nokogiri");
|
||||
RubyModule xmlModule = nokogiri.defineModuleUnder("XML");
|
||||
RubyModule xmlSaxModule = xmlModule.defineModuleUnder("SAX");
|
||||
RubyModule htmlModule = nokogiri.defineModuleUnder("HTML");
|
||||
RubyModule htmlSaxModule = htmlModule.defineModuleUnder("SAX");
|
||||
RubyModule xsltModule = nokogiri.defineModuleUnder("XSLT");
|
||||
|
||||
createNokogiriModule(ruby, nokogiri);
|
||||
createSyntaxErrors(ruby, nokogiri, xmlModule);
|
||||
RubyClass xmlNode = createXmlModule(ruby, xmlModule);
|
||||
createHtmlModule(ruby, htmlModule);
|
||||
createDocuments(ruby, xmlModule, htmlModule, xmlNode);
|
||||
createSaxModule(ruby, xmlSaxModule, htmlSaxModule);
|
||||
createXsltModule(ruby, xsltModule);
|
||||
}
|
||||
|
||||
private void createNokogiriModule(Ruby ruby, RubyModule nokogiri) {;
|
||||
RubyClass encHandler = nokogiri.defineClassUnder("EncodingHandler", ruby.getObject(), ENCODING_HANDLER_ALLOCATOR);
|
||||
encHandler.defineAnnotatedMethods(EncodingHandler.class);
|
||||
}
|
||||
|
||||
private void createSyntaxErrors(Ruby ruby, RubyModule nokogiri, RubyModule xmlModule) {
|
||||
RubyClass syntaxError = nokogiri.defineClassUnder("SyntaxError", ruby.getStandardError(), ruby.getStandardError().getAllocator());
|
||||
RubyClass xmlSyntaxError = xmlModule.defineClassUnder("SyntaxError", syntaxError, XML_SYNTAXERROR_ALLOCATOR);
|
||||
xmlSyntaxError.defineAnnotatedMethods(XmlSyntaxError.class);
|
||||
}
|
||||
|
||||
private RubyClass createXmlModule(Ruby ruby, RubyModule xmlModule) {
|
||||
RubyClass node = xmlModule.defineClassUnder("Node", ruby.getObject(), XML_NODE_ALLOCATOR);
|
||||
node.defineAnnotatedMethods(XmlNode.class);
|
||||
|
||||
RubyClass attr = xmlModule.defineClassUnder("Attr", node, XML_ATTR_ALLOCATOR);
|
||||
attr.defineAnnotatedMethods(XmlAttr.class);
|
||||
|
||||
RubyClass attrDecl = xmlModule.defineClassUnder("AttributeDecl", node, XML_ATTRIBUTE_DECL_ALLOCATOR);
|
||||
attrDecl.defineAnnotatedMethods(XmlAttributeDecl.class);
|
||||
|
||||
RubyClass characterData = xmlModule.defineClassUnder("CharacterData", node, null);
|
||||
|
||||
RubyClass comment = xmlModule.defineClassUnder("Comment", characterData, XML_COMMENT_ALLOCATOR);
|
||||
comment.defineAnnotatedMethods(XmlComment.class);
|
||||
|
||||
RubyClass text = xmlModule.defineClassUnder("Text", characterData, XML_TEXT_ALLOCATOR);
|
||||
text.defineAnnotatedMethods(XmlText.class);
|
||||
|
||||
RubyModule cdata = xmlModule.defineClassUnder("CDATA", text, XML_CDATA_ALLOCATOR);
|
||||
cdata.defineAnnotatedMethods(XmlCdata.class);
|
||||
|
||||
RubyClass dtd = xmlModule.defineClassUnder("DTD", node, XML_DTD_ALLOCATOR);
|
||||
dtd.defineAnnotatedMethods(XmlDtd.class);
|
||||
|
||||
RubyClass documentFragment = xmlModule.defineClassUnder("DocumentFragment", node, XML_DOCUMENT_FRAGMENT_ALLOCATOR);
|
||||
documentFragment.defineAnnotatedMethods(XmlDocumentFragment.class);
|
||||
|
||||
RubyClass element = xmlModule.defineClassUnder("Element", node, XML_ELEMENT_ALLOCATOR);
|
||||
element.defineAnnotatedMethods(XmlElement.class);
|
||||
|
||||
RubyClass elementContent = xmlModule.defineClassUnder("ElementContent", ruby.getObject(), XML_ELEMENT_CONTENT_ALLOCATOR);
|
||||
elementContent.defineAnnotatedMethods(XmlElementContent.class);
|
||||
|
||||
RubyClass elementDecl = xmlModule.defineClassUnder("ElementDecl", node, XML_ELEMENT_DECL_ALLOCATOR);
|
||||
elementDecl.defineAnnotatedMethods(XmlElementDecl.class);
|
||||
|
||||
RubyClass entityDecl = xmlModule.defineClassUnder("EntityDecl", node, XML_ENTITY_DECL_ALLOCATOR);
|
||||
entityDecl.defineAnnotatedMethods(XmlEntityDecl.class);
|
||||
|
||||
entityDecl.defineConstant("INTERNAL_GENERAL", RubyFixnum.newFixnum(ruby, XmlEntityDecl.INTERNAL_GENERAL));
|
||||
entityDecl.defineConstant("EXTERNAL_GENERAL_PARSED", RubyFixnum.newFixnum(ruby, XmlEntityDecl.EXTERNAL_GENERAL_PARSED));
|
||||
entityDecl.defineConstant("EXTERNAL_GENERAL_UNPARSED", RubyFixnum.newFixnum(ruby, XmlEntityDecl.EXTERNAL_GENERAL_UNPARSED));
|
||||
entityDecl.defineConstant("INTERNAL_PARAMETER", RubyFixnum.newFixnum(ruby, XmlEntityDecl.INTERNAL_PARAMETER));
|
||||
entityDecl.defineConstant("EXTERNAL_PARAMETER", RubyFixnum.newFixnum(ruby, XmlEntityDecl.EXTERNAL_PARAMETER));
|
||||
entityDecl.defineConstant("INTERNAL_PREDEFINED", RubyFixnum.newFixnum(ruby, XmlEntityDecl.INTERNAL_PREDEFINED));
|
||||
|
||||
RubyClass entref = xmlModule.defineClassUnder("EntityReference", node, XML_ENTITY_REFERENCE_ALLOCATOR);
|
||||
entref.defineAnnotatedMethods(XmlEntityReference.class);
|
||||
|
||||
RubyClass namespace = xmlModule.defineClassUnder("Namespace", ruby.getObject(), XML_NAMESPACE_ALLOCATOR);
|
||||
namespace.defineAnnotatedMethods(XmlNamespace.class);
|
||||
|
||||
RubyClass nodeSet = xmlModule.defineClassUnder("NodeSet", ruby.getObject(), XML_NODESET_ALLOCATOR);
|
||||
nodeSet.defineAnnotatedMethods(XmlNodeSet.class);
|
||||
|
||||
RubyClass pi = xmlModule.defineClassUnder("ProcessingInstruction", node, XML_PROCESSING_INSTRUCTION_ALLOCATOR);
|
||||
pi.defineAnnotatedMethods(XmlProcessingInstruction.class);
|
||||
|
||||
RubyClass reader = xmlModule.defineClassUnder("Reader", ruby.getObject(), XML_READER_ALLOCATOR);
|
||||
reader.defineAnnotatedMethods(XmlReader.class);
|
||||
|
||||
RubyClass schema = xmlModule.defineClassUnder("Schema", ruby.getObject(), XML_SCHEMA_ALLOCATOR);
|
||||
schema.defineAnnotatedMethods(XmlSchema.class);
|
||||
|
||||
RubyClass relaxng = xmlModule.defineClassUnder("RelaxNG", schema, XML_RELAXNG_ALLOCATOR);
|
||||
relaxng.defineAnnotatedMethods(XmlRelaxng.class);
|
||||
|
||||
RubyClass xpathContext = xmlModule.defineClassUnder("XPathContext", ruby.getObject(), XML_XPATHCONTEXT_ALLOCATOR);
|
||||
xpathContext.defineAnnotatedMethods(XmlXpathContext.class);
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private void createHtmlModule(Ruby ruby, RubyModule htmlModule) {
|
||||
RubyClass htmlElemDesc = htmlModule.defineClassUnder("ElementDescription", ruby.getObject(), HTML_ELEMENT_DESCRIPTION_ALLOCATOR);
|
||||
htmlElemDesc.defineAnnotatedMethods(HtmlElementDescription.class);
|
||||
|
||||
RubyClass htmlEntityLookup = htmlModule.defineClassUnder("EntityLookup", ruby.getObject(), HTML_ENTITY_LOOKUP_ALLOCATOR);
|
||||
htmlEntityLookup.defineAnnotatedMethods(HtmlEntityLookup.class);
|
||||
}
|
||||
|
||||
private void createDocuments(Ruby ruby, RubyModule xmlModule, RubyModule htmlModule, RubyClass node) {
|
||||
RubyClass xmlDocument = xmlModule.defineClassUnder("Document", node, XML_DOCUMENT_ALLOCATOR);
|
||||
xmlDocument.defineAnnotatedMethods(XmlDocument.class);
|
||||
|
||||
//RubyModule htmlDoc = html.defineOrGetClassUnder("Document", document);
|
||||
RubyModule htmlDocument = htmlModule.defineClassUnder("Document", xmlDocument, HTML_DOCUMENT_ALLOCATOR);
|
||||
htmlDocument.defineAnnotatedMethods(HtmlDocument.class);
|
||||
}
|
||||
|
||||
private void createSaxModule(Ruby ruby, RubyModule xmlSaxModule, RubyModule htmlSaxModule) {
|
||||
RubyClass xmlSaxParserContext = xmlSaxModule.defineClassUnder("ParserContext", ruby.getObject(), XML_SAXPARSER_CONTEXT_ALLOCATOR);
|
||||
xmlSaxParserContext.defineAnnotatedMethods(XmlSaxParserContext.class);
|
||||
|
||||
RubyClass xmlSaxPushParser = xmlSaxModule.defineClassUnder("PushParser", ruby.getObject(), XML_SAXPUSHPARSER_ALLOCATOR);
|
||||
xmlSaxPushParser.defineAnnotatedMethods(XmlSaxPushParser.class);
|
||||
|
||||
RubyClass htmlSaxParserContext = htmlSaxModule.defineClassUnder("ParserContext", xmlSaxParserContext, HTML_SAXPARSER_CONTEXT_ALLOCATOR);
|
||||
htmlSaxParserContext.defineAnnotatedMethods(HtmlSaxParserContext.class);
|
||||
}
|
||||
|
||||
private void createXsltModule(Ruby ruby, RubyModule xsltModule) {
|
||||
RubyClass stylesheet = xsltModule.defineClassUnder("Stylesheet", ruby.getObject(), XSLT_STYLESHEET_ALLOCATOR);
|
||||
stylesheet.defineAnnotatedMethods(XsltStylesheet.class);
|
||||
xsltModule.defineAnnotatedMethod(XsltStylesheet.class, "register");
|
||||
}
|
||||
|
||||
private static ObjectAllocator ENCODING_HANDLER_ALLOCATOR = new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
return new EncodingHandler(runtime, klazz, "");
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator HTML_DOCUMENT_ALLOCATOR = new ObjectAllocator() {
|
||||
private HtmlDocument htmlDocument = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (htmlDocument == null) htmlDocument = new HtmlDocument(runtime, klazz);
|
||||
try {
|
||||
HtmlDocument clone = (HtmlDocument) htmlDocument.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new HtmlDocument(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator HTML_SAXPARSER_CONTEXT_ALLOCATOR = new ObjectAllocator() {
|
||||
private HtmlSaxParserContext htmlSaxParserContext = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (htmlSaxParserContext == null) htmlSaxParserContext = new HtmlSaxParserContext(runtime, klazz);
|
||||
try {
|
||||
HtmlSaxParserContext clone = (HtmlSaxParserContext) htmlSaxParserContext.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new HtmlSaxParserContext(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private static ObjectAllocator HTML_ELEMENT_DESCRIPTION_ALLOCATOR =
|
||||
new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
return new HtmlElementDescription(runtime, klazz);
|
||||
}
|
||||
};
|
||||
|
||||
private static ObjectAllocator HTML_ENTITY_LOOKUP_ALLOCATOR =
|
||||
new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
return new HtmlEntityLookup(runtime, klazz);
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_ATTR_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlAttr xmlAttr = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlAttr == null) xmlAttr = new XmlAttr(runtime, klazz);
|
||||
try {
|
||||
XmlAttr clone = (XmlAttr) xmlAttr.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlAttr(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_CDATA_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlCdata xmlCdata = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlCdata == null) xmlCdata = new XmlCdata(runtime, klazz);
|
||||
try {
|
||||
XmlCdata clone = (XmlCdata) xmlCdata.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlCdata(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_COMMENT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlComment xmlComment = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlComment == null) xmlComment = new XmlComment(runtime, klazz);
|
||||
try {
|
||||
XmlComment clone = (XmlComment) xmlComment.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlComment(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_DOCUMENT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlDocument xmlDocument = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlDocument == null) xmlDocument = new XmlDocument(runtime, klazz);
|
||||
try {
|
||||
XmlDocument clone = (XmlDocument) xmlDocument.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlDocument(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_DOCUMENT_FRAGMENT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlDocumentFragment xmlDocumentFragment = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlDocumentFragment == null) xmlDocumentFragment = new XmlDocumentFragment(runtime, klazz);
|
||||
try {
|
||||
XmlDocumentFragment clone = (XmlDocumentFragment)xmlDocumentFragment.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlDocumentFragment(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_DTD_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlDtd xmlDtd = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlDtd == null) xmlDtd = new XmlDtd(runtime, klazz);
|
||||
try {
|
||||
XmlDtd clone = (XmlDtd)xmlDtd.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlDtd(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_ELEMENT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlElement xmlElement = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlElement == null) xmlElement = new XmlElement(runtime, klazz);
|
||||
try {
|
||||
XmlElement clone = (XmlElement)xmlElement.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlElement(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static ObjectAllocator XML_ELEMENT_DECL_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlElementDecl xmlElementDecl = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlElementDecl == null) xmlElementDecl = new XmlElementDecl(runtime, klazz);
|
||||
try {
|
||||
XmlElementDecl clone = (XmlElementDecl)xmlElementDecl.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlElementDecl(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static ObjectAllocator XML_ENTITY_REFERENCE_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlEntityReference xmlEntityRef = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlEntityRef == null) xmlEntityRef = new XmlEntityReference(runtime, klazz);
|
||||
try {
|
||||
XmlEntityReference clone = (XmlEntityReference)xmlEntityRef.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlEntityReference(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_NAMESPACE_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlNamespace xmlNamespace = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlNamespace == null) xmlNamespace = new XmlNamespace(runtime, klazz);
|
||||
try {
|
||||
XmlNamespace clone = (XmlNamespace) xmlNamespace.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlNamespace(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_NODE_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlNode xmlNode = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlNode == null) xmlNode = new XmlNode(runtime, klazz);
|
||||
try {
|
||||
XmlNode clone = (XmlNode) xmlNode.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlNode(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_NODESET_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlNodeSet xmlNodeSet = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlNodeSet == null) xmlNodeSet = new XmlNodeSet(runtime, klazz);
|
||||
try {
|
||||
XmlNodeSet clone = (XmlNodeSet) xmlNodeSet.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
xmlNodeSet = new XmlNodeSet(runtime, klazz);
|
||||
xmlNodeSet.setNodes(RubyArray.newEmptyArray(runtime));
|
||||
return xmlNodeSet;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static ObjectAllocator XML_PROCESSING_INSTRUCTION_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlProcessingInstruction xmlProcessingInstruction = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlProcessingInstruction == null) xmlProcessingInstruction = new XmlProcessingInstruction(runtime, klazz);
|
||||
try {
|
||||
XmlProcessingInstruction clone = (XmlProcessingInstruction)xmlProcessingInstruction.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlProcessingInstruction(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static ObjectAllocator XML_READER_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlReader xmlReader = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlReader == null) xmlReader = new XmlReader(runtime, klazz);
|
||||
try {
|
||||
XmlReader clone = (XmlReader) xmlReader.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
xmlReader = new XmlReader(runtime, klazz);
|
||||
return xmlReader;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private static ObjectAllocator XML_ATTRIBUTE_DECL_ALLOCATOR = new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
return new XmlAttributeDecl(runtime, klazz);
|
||||
}
|
||||
};
|
||||
|
||||
private static ObjectAllocator XML_ENTITY_DECL_ALLOCATOR = new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
return new XmlEntityDecl(runtime, klazz);
|
||||
}
|
||||
};
|
||||
|
||||
private static ObjectAllocator XML_ELEMENT_CONTENT_ALLOCATOR = new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
throw runtime.newNotImplementedError("not implemented");
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_RELAXNG_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlRelaxng xmlRelaxng = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlRelaxng == null) xmlRelaxng = new XmlRelaxng(runtime, klazz);
|
||||
try {
|
||||
XmlRelaxng clone = (XmlRelaxng) xmlRelaxng.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlRelaxng(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_SAXPARSER_CONTEXT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlSaxParserContext xmlSaxParserContext = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlSaxParserContext == null) xmlSaxParserContext = new XmlSaxParserContext(runtime, klazz);
|
||||
try {
|
||||
XmlSaxParserContext clone = (XmlSaxParserContext) xmlSaxParserContext.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlSaxParserContext(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private static ObjectAllocator XML_SAXPUSHPARSER_ALLOCATOR = new ObjectAllocator() {
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
return new XmlSaxPushParser(runtime, klazz);
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_SCHEMA_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlSchema xmlSchema = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlSchema == null) xmlSchema = new XmlSchema(runtime, klazz);
|
||||
try {
|
||||
XmlSchema clone = (XmlSchema) xmlSchema.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlSchema(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_SYNTAXERROR_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlSyntaxError xmlSyntaxError = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlSyntaxError == null) xmlSyntaxError = new XmlSyntaxError(runtime, klazz);
|
||||
try {
|
||||
XmlSyntaxError clone = (XmlSyntaxError) xmlSyntaxError.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlSyntaxError(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static final ObjectAllocator XML_TEXT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlText xmlText = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlText == null) xmlText = new XmlText(runtime, klazz);
|
||||
try {
|
||||
XmlText clone = (XmlText) xmlText.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlText(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static ObjectAllocator XML_XPATHCONTEXT_ALLOCATOR = new ObjectAllocator() {
|
||||
private XmlXpathContext xmlXpathContext = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xmlXpathContext == null) xmlXpathContext = new XmlXpathContext(runtime, klazz);
|
||||
try {
|
||||
XmlXpathContext clone = (XmlXpathContext) xmlXpathContext.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlXpathContext(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static ObjectAllocator XSLT_STYLESHEET_ALLOCATOR = new ObjectAllocator() {
|
||||
private XsltStylesheet xsltStylesheet = null;
|
||||
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
|
||||
if (xsltStylesheet == null) xsltStylesheet = new XsltStylesheet(runtime, klazz);
|
||||
try {
|
||||
XsltStylesheet clone = (XsltStylesheet) xsltStylesheet.clone();
|
||||
clone.setMetaClass(klazz);
|
||||
return clone;
|
||||
} catch (CloneNotSupportedException e) {
|
||||
return new XmlText(runtime, klazz);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
872
test/fixtures/java/clojure-type.java
vendored
Normal file
872
test/fixtures/java/clojure-type.java
vendored
Normal file
@@ -0,0 +1,872 @@
|
||||
/***
|
||||
* ASM: a very small and fast Java bytecode manipulation framework
|
||||
* Copyright (c) 2000-2005 INRIA, France Telecom
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holders nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
|
||||
* THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
package clojure.asm;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
/**
|
||||
* A Java type. This class can be used to make it easier to manipulate type and
|
||||
* method descriptors.
|
||||
*
|
||||
* @author Eric Bruneton
|
||||
* @author Chris Nokleberg
|
||||
*/
|
||||
public class Type{
|
||||
|
||||
/**
|
||||
* The sort of the <tt>void</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int VOID = 0;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>boolean</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int BOOLEAN = 1;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>char</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int CHAR = 2;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>byte</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int BYTE = 3;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>short</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int SHORT = 4;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>int</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int INT = 5;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>float</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int FLOAT = 6;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>long</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int LONG = 7;
|
||||
|
||||
/**
|
||||
* The sort of the <tt>double</tt> type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int DOUBLE = 8;
|
||||
|
||||
/**
|
||||
* The sort of array reference types. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int ARRAY = 9;
|
||||
|
||||
/**
|
||||
* The sort of object reference type. See {@link #getSort getSort}.
|
||||
*/
|
||||
public final static int OBJECT = 10;
|
||||
|
||||
/**
|
||||
* The <tt>void</tt> type.
|
||||
*/
|
||||
public final static Type VOID_TYPE = new Type(VOID);
|
||||
|
||||
/**
|
||||
* The <tt>boolean</tt> type.
|
||||
*/
|
||||
public final static Type BOOLEAN_TYPE = new Type(BOOLEAN);
|
||||
|
||||
/**
|
||||
* The <tt>char</tt> type.
|
||||
*/
|
||||
public final static Type CHAR_TYPE = new Type(CHAR);
|
||||
|
||||
/**
|
||||
* The <tt>byte</tt> type.
|
||||
*/
|
||||
public final static Type BYTE_TYPE = new Type(BYTE);
|
||||
|
||||
/**
|
||||
* The <tt>short</tt> type.
|
||||
*/
|
||||
public final static Type SHORT_TYPE = new Type(SHORT);
|
||||
|
||||
/**
|
||||
* The <tt>int</tt> type.
|
||||
*/
|
||||
public final static Type INT_TYPE = new Type(INT);
|
||||
|
||||
/**
|
||||
* The <tt>float</tt> type.
|
||||
*/
|
||||
public final static Type FLOAT_TYPE = new Type(FLOAT);
|
||||
|
||||
/**
|
||||
* The <tt>long</tt> type.
|
||||
*/
|
||||
public final static Type LONG_TYPE = new Type(LONG);
|
||||
|
||||
/**
|
||||
* The <tt>double</tt> type.
|
||||
*/
|
||||
public final static Type DOUBLE_TYPE = new Type(DOUBLE);
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Fields
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* The sort of this Java type.
|
||||
*/
|
||||
private final int sort;
|
||||
|
||||
/**
|
||||
* A buffer containing the descriptor of this Java type. This field is only
|
||||
* used for reference types.
|
||||
*/
|
||||
private char[] buf;
|
||||
|
||||
/**
|
||||
* The offset of the descriptor of this Java type in {@link #buf buf}. This
|
||||
* field is only used for reference types.
|
||||
*/
|
||||
private int off;
|
||||
|
||||
/**
|
||||
* The length of the descriptor of this Java type.
|
||||
*/
|
||||
private int len;
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Constructors
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Constructs a primitive type.
|
||||
*
|
||||
* @param sort the sort of the primitive type to be constructed.
|
||||
*/
|
||||
private Type(final int sort){
|
||||
this.sort = sort;
|
||||
this.len = 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a reference type.
|
||||
*
|
||||
* @param sort the sort of the reference type to be constructed.
|
||||
* @param buf a buffer containing the descriptor of the previous type.
|
||||
* @param off the offset of this descriptor in the previous buffer.
|
||||
* @param len the length of this descriptor.
|
||||
*/
|
||||
private Type(final int sort, final char[] buf, final int off, final int len){
|
||||
this.sort = sort;
|
||||
this.buf = buf;
|
||||
this.off = off;
|
||||
this.len = len;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java type corresponding to the given type descriptor.
|
||||
*
|
||||
* @param typeDescriptor a type descriptor.
|
||||
* @return the Java type corresponding to the given type descriptor.
|
||||
*/
|
||||
public static Type getType(final String typeDescriptor){
|
||||
return getType(typeDescriptor.toCharArray(), 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java type corresponding to the given class.
|
||||
*
|
||||
* @param c a class.
|
||||
* @return the Java type corresponding to the given class.
|
||||
*/
|
||||
public static Type getType(final Class c){
|
||||
if(c.isPrimitive())
|
||||
{
|
||||
if(c == Integer.TYPE)
|
||||
{
|
||||
return INT_TYPE;
|
||||
}
|
||||
else if(c == Void.TYPE)
|
||||
{
|
||||
return VOID_TYPE;
|
||||
}
|
||||
else if(c == Boolean.TYPE)
|
||||
{
|
||||
return BOOLEAN_TYPE;
|
||||
}
|
||||
else if(c == Byte.TYPE)
|
||||
{
|
||||
return BYTE_TYPE;
|
||||
}
|
||||
else if(c == Character.TYPE)
|
||||
{
|
||||
return CHAR_TYPE;
|
||||
}
|
||||
else if(c == Short.TYPE)
|
||||
{
|
||||
return SHORT_TYPE;
|
||||
}
|
||||
else if(c == Double.TYPE)
|
||||
{
|
||||
return DOUBLE_TYPE;
|
||||
}
|
||||
else if(c == Float.TYPE)
|
||||
{
|
||||
return FLOAT_TYPE;
|
||||
}
|
||||
else /* if (c == Long.TYPE) */
|
||||
{
|
||||
return LONG_TYPE;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
return getType(getDescriptor(c));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link Type#OBJECT} type for the given internal class name.
|
||||
* This is a shortcut method for <code>Type.getType("L"+name+";")</code>.
|
||||
* <i>Note that opposed to {@link Type#getType(String)}, this method takes
|
||||
* internal class names and not class descriptor.</i>
|
||||
*
|
||||
* @param name an internal class name.
|
||||
* @return the the {@link Type#OBJECT} type for the given class name.
|
||||
*/
|
||||
public static Type getObjectType(String name){
|
||||
int l = name.length();
|
||||
char[] buf = new char[l + 2];
|
||||
buf[0] = 'L';
|
||||
buf[l + 1] = ';';
|
||||
name.getChars(0, l, buf, 1);
|
||||
return new Type(OBJECT, buf, 0, l + 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java types corresponding to the argument types of the given
|
||||
* method descriptor.
|
||||
*
|
||||
* @param methodDescriptor a method descriptor.
|
||||
* @return the Java types corresponding to the argument types of the given
|
||||
* method descriptor.
|
||||
*/
|
||||
public static Type[] getArgumentTypes(final String methodDescriptor){
|
||||
char[] buf = methodDescriptor.toCharArray();
|
||||
int off = 1;
|
||||
int size = 0;
|
||||
while(true)
|
||||
{
|
||||
char car = buf[off++];
|
||||
if(car == ')')
|
||||
{
|
||||
break;
|
||||
}
|
||||
else if(car == 'L')
|
||||
{
|
||||
while(buf[off++] != ';')
|
||||
{
|
||||
}
|
||||
++size;
|
||||
}
|
||||
else if(car != '[')
|
||||
{
|
||||
++size;
|
||||
}
|
||||
}
|
||||
Type[] args = new Type[size];
|
||||
off = 1;
|
||||
size = 0;
|
||||
while(buf[off] != ')')
|
||||
{
|
||||
args[size] = getType(buf, off);
|
||||
off += args[size].len;
|
||||
size += 1;
|
||||
}
|
||||
return args;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java types corresponding to the argument types of the given
|
||||
* method.
|
||||
*
|
||||
* @param method a method.
|
||||
* @return the Java types corresponding to the argument types of the given
|
||||
* method.
|
||||
*/
|
||||
public static Type[] getArgumentTypes(final Method method){
|
||||
Class[] classes = method.getParameterTypes();
|
||||
Type[] types = new Type[classes.length];
|
||||
for(int i = classes.length - 1; i >= 0; --i)
|
||||
{
|
||||
types[i] = getType(classes[i]);
|
||||
}
|
||||
return types;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java type corresponding to the return type of the given
|
||||
* method descriptor.
|
||||
*
|
||||
* @param methodDescriptor a method descriptor.
|
||||
* @return the Java type corresponding to the return type of the given
|
||||
* method descriptor.
|
||||
*/
|
||||
public static Type getReturnType(final String methodDescriptor){
|
||||
char[] buf = methodDescriptor.toCharArray();
|
||||
return getType(buf, methodDescriptor.indexOf(')') + 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java type corresponding to the return type of the given
|
||||
* method.
|
||||
*
|
||||
* @param method a method.
|
||||
* @return the Java type corresponding to the return type of the given
|
||||
* method.
|
||||
*/
|
||||
public static Type getReturnType(final Method method){
|
||||
return getType(method.getReturnType());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Java type corresponding to the given type descriptor.
|
||||
*
|
||||
* @param buf a buffer containing a type descriptor.
|
||||
* @param off the offset of this descriptor in the previous buffer.
|
||||
* @return the Java type corresponding to the given type descriptor.
|
||||
*/
|
||||
private static Type getType(final char[] buf, final int off){
|
||||
int len;
|
||||
switch(buf[off])
|
||||
{
|
||||
case'V':
|
||||
return VOID_TYPE;
|
||||
case'Z':
|
||||
return BOOLEAN_TYPE;
|
||||
case'C':
|
||||
return CHAR_TYPE;
|
||||
case'B':
|
||||
return BYTE_TYPE;
|
||||
case'S':
|
||||
return SHORT_TYPE;
|
||||
case'I':
|
||||
return INT_TYPE;
|
||||
case'F':
|
||||
return FLOAT_TYPE;
|
||||
case'J':
|
||||
return LONG_TYPE;
|
||||
case'D':
|
||||
return DOUBLE_TYPE;
|
||||
case'[':
|
||||
len = 1;
|
||||
while(buf[off + len] == '[')
|
||||
{
|
||||
++len;
|
||||
}
|
||||
if(buf[off + len] == 'L')
|
||||
{
|
||||
++len;
|
||||
while(buf[off + len] != ';')
|
||||
{
|
||||
++len;
|
||||
}
|
||||
}
|
||||
return new Type(ARRAY, buf, off, len + 1);
|
||||
// case 'L':
|
||||
default:
|
||||
len = 1;
|
||||
while(buf[off + len] != ';')
|
||||
{
|
||||
++len;
|
||||
}
|
||||
return new Type(OBJECT, buf, off, len + 1);
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Accessors
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the sort of this Java type.
|
||||
*
|
||||
* @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN},
|
||||
* {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT},
|
||||
* {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG},
|
||||
* {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY} or
|
||||
* {@link #OBJECT OBJECT}.
|
||||
*/
|
||||
public int getSort(){
|
||||
return sort;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of dimensions of this array type. This method should
|
||||
* only be used for an array type.
|
||||
*
|
||||
* @return the number of dimensions of this array type.
|
||||
*/
|
||||
public int getDimensions(){
|
||||
int i = 1;
|
||||
while(buf[off + i] == '[')
|
||||
{
|
||||
++i;
|
||||
}
|
||||
return i;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the type of the elements of this array type. This method should
|
||||
* only be used for an array type.
|
||||
*
|
||||
* @return Returns the type of the elements of this array type.
|
||||
*/
|
||||
public Type getElementType(){
|
||||
return getType(buf, off + getDimensions());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the class corresponding to this type.
|
||||
*
|
||||
* @return the fully qualified name of the class corresponding to this type.
|
||||
*/
|
||||
public String getClassName(){
|
||||
switch(sort)
|
||||
{
|
||||
case VOID:
|
||||
return "void";
|
||||
case BOOLEAN:
|
||||
return "boolean";
|
||||
case CHAR:
|
||||
return "char";
|
||||
case BYTE:
|
||||
return "byte";
|
||||
case SHORT:
|
||||
return "short";
|
||||
case INT:
|
||||
return "int";
|
||||
case FLOAT:
|
||||
return "float";
|
||||
case LONG:
|
||||
return "long";
|
||||
case DOUBLE:
|
||||
return "double";
|
||||
case ARRAY:
|
||||
StringBuffer b = new StringBuffer(getElementType().getClassName());
|
||||
for(int i = getDimensions(); i > 0; --i)
|
||||
{
|
||||
b.append("[]");
|
||||
}
|
||||
return b.toString();
|
||||
// case OBJECT:
|
||||
default:
|
||||
return new String(buf, off + 1, len - 2).replace('/', '.');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the internal name of the class corresponding to this object type.
|
||||
* The internal name of a class is its fully qualified name, where '.' are
|
||||
* replaced by '/'. This method should only be used for an object type.
|
||||
*
|
||||
* @return the internal name of the class corresponding to this object type.
|
||||
*/
|
||||
public String getInternalName(){
|
||||
return new String(buf, off + 1, len - 2);
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Conversion to type descriptors
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the descriptor corresponding to this Java type.
|
||||
*
|
||||
* @return the descriptor corresponding to this Java type.
|
||||
*/
|
||||
public String getDescriptor(){
|
||||
StringBuffer buf = new StringBuffer();
|
||||
getDescriptor(buf);
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the descriptor corresponding to the given argument and return
|
||||
* types.
|
||||
*
|
||||
* @param returnType the return type of the method.
|
||||
* @param argumentTypes the argument types of the method.
|
||||
* @return the descriptor corresponding to the given argument and return
|
||||
* types.
|
||||
*/
|
||||
public static String getMethodDescriptor(
|
||||
final Type returnType,
|
||||
final Type[] argumentTypes){
|
||||
StringBuffer buf = new StringBuffer();
|
||||
buf.append('(');
|
||||
for(int i = 0; i < argumentTypes.length; ++i)
|
||||
{
|
||||
argumentTypes[i].getDescriptor(buf);
|
||||
}
|
||||
buf.append(')');
|
||||
returnType.getDescriptor(buf);
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends the descriptor corresponding to this Java type to the given
|
||||
* string buffer.
|
||||
*
|
||||
* @param buf the string buffer to which the descriptor must be appended.
|
||||
*/
|
||||
private void getDescriptor(final StringBuffer buf){
|
||||
switch(sort)
|
||||
{
|
||||
case VOID:
|
||||
buf.append('V');
|
||||
return;
|
||||
case BOOLEAN:
|
||||
buf.append('Z');
|
||||
return;
|
||||
case CHAR:
|
||||
buf.append('C');
|
||||
return;
|
||||
case BYTE:
|
||||
buf.append('B');
|
||||
return;
|
||||
case SHORT:
|
||||
buf.append('S');
|
||||
return;
|
||||
case INT:
|
||||
buf.append('I');
|
||||
return;
|
||||
case FLOAT:
|
||||
buf.append('F');
|
||||
return;
|
||||
case LONG:
|
||||
buf.append('J');
|
||||
return;
|
||||
case DOUBLE:
|
||||
buf.append('D');
|
||||
return;
|
||||
// case ARRAY:
|
||||
// case OBJECT:
|
||||
default:
|
||||
buf.append(this.buf, off, len);
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Direct conversion from classes to type descriptors,
|
||||
// without intermediate Type objects
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the internal name of the given class. The internal name of a
|
||||
* class is its fully qualified name, where '.' are replaced by '/'.
|
||||
*
|
||||
* @param c an object class.
|
||||
* @return the internal name of the given class.
|
||||
*/
|
||||
public static String getInternalName(final Class c){
|
||||
return c.getName().replace('.', '/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the descriptor corresponding to the given Java type.
|
||||
*
|
||||
* @param c an object class, a primitive class or an array class.
|
||||
* @return the descriptor corresponding to the given class.
|
||||
*/
|
||||
public static String getDescriptor(final Class c){
|
||||
StringBuffer buf = new StringBuffer();
|
||||
getDescriptor(buf, c);
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the descriptor corresponding to the given constructor.
|
||||
*
|
||||
* @param c a {@link Constructor Constructor} object.
|
||||
* @return the descriptor of the given constructor.
|
||||
*/
|
||||
public static String getConstructorDescriptor(final Constructor c){
|
||||
Class[] parameters = c.getParameterTypes();
|
||||
StringBuffer buf = new StringBuffer();
|
||||
buf.append('(');
|
||||
for(int i = 0; i < parameters.length; ++i)
|
||||
{
|
||||
getDescriptor(buf, parameters[i]);
|
||||
}
|
||||
return buf.append(")V").toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the descriptor corresponding to the given method.
|
||||
*
|
||||
* @param m a {@link Method Method} object.
|
||||
* @return the descriptor of the given method.
|
||||
*/
|
||||
public static String getMethodDescriptor(final Method m){
|
||||
Class[] parameters = m.getParameterTypes();
|
||||
StringBuffer buf = new StringBuffer();
|
||||
buf.append('(');
|
||||
for(int i = 0; i < parameters.length; ++i)
|
||||
{
|
||||
getDescriptor(buf, parameters[i]);
|
||||
}
|
||||
buf.append(')');
|
||||
getDescriptor(buf, m.getReturnType());
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends the descriptor of the given class to the given string buffer.
|
||||
*
|
||||
* @param buf the string buffer to which the descriptor must be appended.
|
||||
* @param c the class whose descriptor must be computed.
|
||||
*/
|
||||
private static void getDescriptor(final StringBuffer buf, final Class c){
|
||||
Class d = c;
|
||||
while(true)
|
||||
{
|
||||
if(d.isPrimitive())
|
||||
{
|
||||
char car;
|
||||
if(d == Integer.TYPE)
|
||||
{
|
||||
car = 'I';
|
||||
}
|
||||
else if(d == Void.TYPE)
|
||||
{
|
||||
car = 'V';
|
||||
}
|
||||
else if(d == Boolean.TYPE)
|
||||
{
|
||||
car = 'Z';
|
||||
}
|
||||
else if(d == Byte.TYPE)
|
||||
{
|
||||
car = 'B';
|
||||
}
|
||||
else if(d == Character.TYPE)
|
||||
{
|
||||
car = 'C';
|
||||
}
|
||||
else if(d == Short.TYPE)
|
||||
{
|
||||
car = 'S';
|
||||
}
|
||||
else if(d == Double.TYPE)
|
||||
{
|
||||
car = 'D';
|
||||
}
|
||||
else if(d == Float.TYPE)
|
||||
{
|
||||
car = 'F';
|
||||
}
|
||||
else /* if (d == Long.TYPE) */
|
||||
{
|
||||
car = 'J';
|
||||
}
|
||||
buf.append(car);
|
||||
return;
|
||||
}
|
||||
else if(d.isArray())
|
||||
{
|
||||
buf.append('[');
|
||||
d = d.getComponentType();
|
||||
}
|
||||
else
|
||||
{
|
||||
buf.append('L');
|
||||
String name = d.getName();
|
||||
int len = name.length();
|
||||
for(int i = 0; i < len; ++i)
|
||||
{
|
||||
char car = name.charAt(i);
|
||||
buf.append(car == '.' ? '/' : car);
|
||||
}
|
||||
buf.append(';');
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Corresponding size and opcodes
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns the size of values of this type.
|
||||
*
|
||||
* @return the size of values of this type, i.e., 2 for <tt>long</tt> and
|
||||
* <tt>double</tt>, and 1 otherwise.
|
||||
*/
|
||||
public int getSize(){
|
||||
return sort == LONG || sort == DOUBLE ? 2 : 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a JVM instruction opcode adapted to this Java type.
|
||||
*
|
||||
* @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
|
||||
* ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
|
||||
* ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
|
||||
* @return an opcode that is similar to the given opcode, but adapted to
|
||||
* this Java type. For example, if this type is <tt>float</tt> and
|
||||
* <tt>opcode</tt> is IRETURN, this method returns FRETURN.
|
||||
*/
|
||||
public int getOpcode(final int opcode){
|
||||
if(opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE)
|
||||
{
|
||||
switch(sort)
|
||||
{
|
||||
case BOOLEAN:
|
||||
case BYTE:
|
||||
return opcode + 5;
|
||||
case CHAR:
|
||||
return opcode + 6;
|
||||
case SHORT:
|
||||
return opcode + 7;
|
||||
case INT:
|
||||
return opcode;
|
||||
case FLOAT:
|
||||
return opcode + 2;
|
||||
case LONG:
|
||||
return opcode + 1;
|
||||
case DOUBLE:
|
||||
return opcode + 3;
|
||||
// case ARRAY:
|
||||
// case OBJECT:
|
||||
default:
|
||||
return opcode + 4;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
switch(sort)
|
||||
{
|
||||
case VOID:
|
||||
return opcode + 5;
|
||||
case BOOLEAN:
|
||||
case CHAR:
|
||||
case BYTE:
|
||||
case SHORT:
|
||||
case INT:
|
||||
return opcode;
|
||||
case FLOAT:
|
||||
return opcode + 2;
|
||||
case LONG:
|
||||
return opcode + 1;
|
||||
case DOUBLE:
|
||||
return opcode + 3;
|
||||
// case ARRAY:
|
||||
// case OBJECT:
|
||||
default:
|
||||
return opcode + 4;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------
|
||||
// Equals, hashCode and toString
|
||||
// ------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Tests if the given object is equal to this type.
|
||||
*
|
||||
* @param o the object to be compared to this type.
|
||||
* @return <tt>true</tt> if the given object is equal to this type.
|
||||
*/
|
||||
public boolean equals(final Object o){
|
||||
if(this == o)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
if(!(o instanceof Type))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
Type t = (Type) o;
|
||||
if(sort != t.sort)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if(sort == Type.OBJECT || sort == Type.ARRAY)
|
||||
{
|
||||
if(len != t.len)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
for(int i = off, j = t.off, end = i + len; i < end; i++, j++)
|
||||
{
|
||||
if(buf[i] != t.buf[j])
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a hash code value for this type.
|
||||
*
|
||||
* @return a hash code value for this type.
|
||||
*/
|
||||
public int hashCode(){
|
||||
int hc = 13 * sort;
|
||||
if(sort == Type.OBJECT || sort == Type.ARRAY)
|
||||
{
|
||||
for(int i = off, end = i + len; i < end; i++)
|
||||
{
|
||||
hc = 17 * (hc + buf[i]);
|
||||
}
|
||||
}
|
||||
return hc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string representation of this type.
|
||||
*
|
||||
* @return the descriptor of this type.
|
||||
*/
|
||||
public String toString(){
|
||||
return getDescriptor();
|
||||
}
|
||||
}
|
||||
197
test/fixtures/java/clojure-util.java
vendored
Normal file
197
test/fixtures/java/clojure-util.java
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
/**
|
||||
* Copyright (c) Rich Hickey. All rights reserved.
|
||||
* The use and distribution terms for this software are covered by the
|
||||
* Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
||||
* which can be found in the file epl-v10.html at the root of this distribution.
|
||||
* By using this software in any fashion, you are agreeing to be bound by
|
||||
* the terms of this license.
|
||||
* You must not remove this notice, or any other, from this software.
|
||||
**/
|
||||
|
||||
/* rich Apr 19, 2008 */
|
||||
|
||||
package clojure.lang;
|
||||
|
||||
import java.lang.ref.Reference;
|
||||
import java.math.BigInteger;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.lang.ref.SoftReference;
|
||||
import java.lang.ref.ReferenceQueue;
|
||||
|
||||
public class Util{
|
||||
static public boolean equiv(Object k1, Object k2){
|
||||
if(k1 == k2)
|
||||
return true;
|
||||
if(k1 != null)
|
||||
{
|
||||
if(k1 instanceof Number && k2 instanceof Number)
|
||||
return Numbers.equal((Number)k1, (Number)k2);
|
||||
else if(k1 instanceof IPersistentCollection || k2 instanceof IPersistentCollection)
|
||||
return pcequiv(k1,k2);
|
||||
return k1.equals(k2);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static public boolean equiv(long k1, long k2){
|
||||
return k1 == k2;
|
||||
}
|
||||
|
||||
static public boolean equiv(Object k1, long k2){
|
||||
return equiv(k1, (Object)k2);
|
||||
}
|
||||
|
||||
static public boolean equiv(long k1, Object k2){
|
||||
return equiv((Object)k1, k2);
|
||||
}
|
||||
|
||||
static public boolean equiv(double k1, double k2){
|
||||
return k1 == k2;
|
||||
}
|
||||
|
||||
static public boolean equiv(Object k1, double k2){
|
||||
return equiv(k1, (Object)k2);
|
||||
}
|
||||
|
||||
static public boolean equiv(double k1, Object k2){
|
||||
return equiv((Object)k1, k2);
|
||||
}
|
||||
|
||||
static public boolean equiv(boolean k1, boolean k2){
|
||||
return k1 == k2;
|
||||
}
|
||||
|
||||
static public boolean equiv(Object k1, boolean k2){
|
||||
return equiv(k1, (Object)k2);
|
||||
}
|
||||
|
||||
static public boolean equiv(boolean k1, Object k2){
|
||||
return equiv((Object)k1, k2);
|
||||
}
|
||||
|
||||
static public boolean equiv(char c1, char c2) {
|
||||
return c1 == c2;
|
||||
}
|
||||
|
||||
static public boolean pcequiv(Object k1, Object k2){
|
||||
if(k1 instanceof IPersistentCollection)
|
||||
return ((IPersistentCollection)k1).equiv(k2);
|
||||
return ((IPersistentCollection)k2).equiv(k1);
|
||||
}
|
||||
|
||||
static public boolean equals(Object k1, Object k2){
|
||||
if(k1 == k2)
|
||||
return true;
|
||||
return k1 != null && k1.equals(k2);
|
||||
}
|
||||
|
||||
static public boolean identical(Object k1, Object k2){
|
||||
return k1 == k2;
|
||||
}
|
||||
|
||||
static public Class classOf(Object x){
|
||||
if(x != null)
|
||||
return x.getClass();
|
||||
return null;
|
||||
}
|
||||
|
||||
static public int compare(Object k1, Object k2){
|
||||
if(k1 == k2)
|
||||
return 0;
|
||||
if(k1 != null)
|
||||
{
|
||||
if(k2 == null)
|
||||
return 1;
|
||||
if(k1 instanceof Number)
|
||||
return Numbers.compare((Number) k1, (Number) k2);
|
||||
return ((Comparable) k1).compareTo(k2);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
static public int hash(Object o){
|
||||
if(o == null)
|
||||
return 0;
|
||||
return o.hashCode();
|
||||
}
|
||||
|
||||
static public int hasheq(Object o){
|
||||
if(o == null)
|
||||
return 0;
|
||||
if(o instanceof Number)
|
||||
return Numbers.hasheq((Number)o);
|
||||
else if(o instanceof IHashEq)
|
||||
return ((IHashEq)o).hasheq();
|
||||
return o.hashCode();
|
||||
}
|
||||
|
||||
static public int hashCombine(int seed, int hash){
|
||||
//a la boost
|
||||
seed ^= hash + 0x9e3779b9 + (seed << 6) + (seed >> 2);
|
||||
return seed;
|
||||
}
|
||||
|
||||
static public boolean isPrimitive(Class c){
|
||||
return c != null && c.isPrimitive() && !(c == Void.TYPE);
|
||||
}
|
||||
|
||||
static public boolean isInteger(Object x){
|
||||
return x instanceof Integer
|
||||
|| x instanceof Long
|
||||
|| x instanceof BigInt
|
||||
|| x instanceof BigInteger;
|
||||
}
|
||||
|
||||
static public Object ret1(Object ret, Object nil){
|
||||
return ret;
|
||||
}
|
||||
|
||||
static public ISeq ret1(ISeq ret, Object nil){
|
||||
return ret;
|
||||
}
|
||||
|
||||
static public <K,V> void clearCache(ReferenceQueue rq, ConcurrentHashMap<K, Reference<V>> cache){
|
||||
//cleanup any dead entries
|
||||
if(rq.poll() != null)
|
||||
{
|
||||
while(rq.poll() != null)
|
||||
;
|
||||
for(Map.Entry<K, Reference<V>> e : cache.entrySet())
|
||||
{
|
||||
Reference<V> val = e.getValue();
|
||||
if(val != null && val.get() == null)
|
||||
cache.remove(e.getKey(), val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static public RuntimeException runtimeException(String s){
|
||||
return new RuntimeException(s);
|
||||
}
|
||||
|
||||
static public RuntimeException runtimeException(String s, Throwable e){
|
||||
return new RuntimeException(s, e);
|
||||
}
|
||||
|
||||
/**
|
||||
* Throw even checked exceptions without being required
|
||||
* to declare them or catch them. Suggested idiom:
|
||||
* <p>
|
||||
* <code>throw sneakyThrow( some exception );</code>
|
||||
*/
|
||||
static public RuntimeException sneakyThrow(Throwable t) {
|
||||
// http://www.mail-archive.com/javaposse@googlegroups.com/msg05984.html
|
||||
if (t == null)
|
||||
throw new NullPointerException();
|
||||
Util.<RuntimeException>sneakyThrow0(t);
|
||||
return null;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static private <T extends Throwable> void sneakyThrow0(Throwable t) throws T {
|
||||
throw (T) t;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
218
test/fixtures/javascript/bootstrap-modal.js
vendored
Normal file
218
test/fixtures/javascript/bootstrap-modal.js
vendored
Normal file
@@ -0,0 +1,218 @@
|
||||
/* =========================================================
|
||||
* bootstrap-modal.js v2.0.4
|
||||
* http://twitter.github.com/bootstrap/javascript.html#modals
|
||||
* =========================================================
|
||||
* Copyright 2012 Twitter, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* ========================================================= */
|
||||
|
||||
|
||||
!function ($) {
|
||||
|
||||
"use strict"; // jshint ;_;
|
||||
|
||||
|
||||
/* MODAL CLASS DEFINITION
|
||||
* ====================== */
|
||||
|
||||
var Modal = function (content, options) {
|
||||
this.options = options
|
||||
this.$element = $(content)
|
||||
.delegate('[data-dismiss="modal"]', 'click.dismiss.modal', $.proxy(this.hide, this))
|
||||
}
|
||||
|
||||
Modal.prototype = {
|
||||
|
||||
constructor: Modal
|
||||
|
||||
, toggle: function () {
|
||||
return this[!this.isShown ? 'show' : 'hide']()
|
||||
}
|
||||
|
||||
, show: function () {
|
||||
var that = this
|
||||
, e = $.Event('show')
|
||||
|
||||
this.$element.trigger(e)
|
||||
|
||||
if (this.isShown || e.isDefaultPrevented()) return
|
||||
|
||||
$('body').addClass('modal-open')
|
||||
|
||||
this.isShown = true
|
||||
|
||||
escape.call(this)
|
||||
backdrop.call(this, function () {
|
||||
var transition = $.support.transition && that.$element.hasClass('fade')
|
||||
|
||||
if (!that.$element.parent().length) {
|
||||
that.$element.appendTo(document.body) //don't move modals dom position
|
||||
}
|
||||
|
||||
that.$element
|
||||
.show()
|
||||
|
||||
if (transition) {
|
||||
that.$element[0].offsetWidth // force reflow
|
||||
}
|
||||
|
||||
that.$element.addClass('in')
|
||||
|
||||
transition ?
|
||||
that.$element.one($.support.transition.end, function () { that.$element.trigger('shown') }) :
|
||||
that.$element.trigger('shown')
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
, hide: function (e) {
|
||||
e && e.preventDefault()
|
||||
|
||||
var that = this
|
||||
|
||||
e = $.Event('hide')
|
||||
|
||||
this.$element.trigger(e)
|
||||
|
||||
if (!this.isShown || e.isDefaultPrevented()) return
|
||||
|
||||
this.isShown = false
|
||||
|
||||
$('body').removeClass('modal-open')
|
||||
|
||||
escape.call(this)
|
||||
|
||||
this.$element.removeClass('in')
|
||||
|
||||
$.support.transition && this.$element.hasClass('fade') ?
|
||||
hideWithTransition.call(this) :
|
||||
hideModal.call(this)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/* MODAL PRIVATE METHODS
|
||||
* ===================== */
|
||||
|
||||
function hideWithTransition() {
|
||||
var that = this
|
||||
, timeout = setTimeout(function () {
|
||||
that.$element.off($.support.transition.end)
|
||||
hideModal.call(that)
|
||||
}, 500)
|
||||
|
||||
this.$element.one($.support.transition.end, function () {
|
||||
clearTimeout(timeout)
|
||||
hideModal.call(that)
|
||||
})
|
||||
}
|
||||
|
||||
function hideModal(that) {
|
||||
this.$element
|
||||
.hide()
|
||||
.trigger('hidden')
|
||||
|
||||
backdrop.call(this)
|
||||
}
|
||||
|
||||
function backdrop(callback) {
|
||||
var that = this
|
||||
, animate = this.$element.hasClass('fade') ? 'fade' : ''
|
||||
|
||||
if (this.isShown && this.options.backdrop) {
|
||||
var doAnimate = $.support.transition && animate
|
||||
|
||||
this.$backdrop = $('<div class="modal-backdrop ' + animate + '" />')
|
||||
.appendTo(document.body)
|
||||
|
||||
if (this.options.backdrop != 'static') {
|
||||
this.$backdrop.click($.proxy(this.hide, this))
|
||||
}
|
||||
|
||||
if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
|
||||
|
||||
this.$backdrop.addClass('in')
|
||||
|
||||
doAnimate ?
|
||||
this.$backdrop.one($.support.transition.end, callback) :
|
||||
callback()
|
||||
|
||||
} else if (!this.isShown && this.$backdrop) {
|
||||
this.$backdrop.removeClass('in')
|
||||
|
||||
$.support.transition && this.$element.hasClass('fade')?
|
||||
this.$backdrop.one($.support.transition.end, $.proxy(removeBackdrop, this)) :
|
||||
removeBackdrop.call(this)
|
||||
|
||||
} else if (callback) {
|
||||
callback()
|
||||
}
|
||||
}
|
||||
|
||||
function removeBackdrop() {
|
||||
this.$backdrop.remove()
|
||||
this.$backdrop = null
|
||||
}
|
||||
|
||||
function escape() {
|
||||
var that = this
|
||||
if (this.isShown && this.options.keyboard) {
|
||||
$(document).on('keyup.dismiss.modal', function ( e ) {
|
||||
e.which == 27 && that.hide()
|
||||
})
|
||||
} else if (!this.isShown) {
|
||||
$(document).off('keyup.dismiss.modal')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* MODAL PLUGIN DEFINITION
|
||||
* ======================= */
|
||||
|
||||
$.fn.modal = function (option) {
|
||||
return this.each(function () {
|
||||
var $this = $(this)
|
||||
, data = $this.data('modal')
|
||||
, options = $.extend({}, $.fn.modal.defaults, $this.data(), typeof option == 'object' && option)
|
||||
if (!data) $this.data('modal', (data = new Modal(this, options)))
|
||||
if (typeof option == 'string') data[option]()
|
||||
else if (options.show) data.show()
|
||||
})
|
||||
}
|
||||
|
||||
$.fn.modal.defaults = {
|
||||
backdrop: true
|
||||
, keyboard: true
|
||||
, show: true
|
||||
}
|
||||
|
||||
$.fn.modal.Constructor = Modal
|
||||
|
||||
|
||||
/* MODAL DATA-API
|
||||
* ============== */
|
||||
|
||||
$(function () {
|
||||
$('body').on('click.modal.data-api', '[data-toggle="modal"]', function ( e ) {
|
||||
var $this = $(this), href
|
||||
, $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) //strip for ie7
|
||||
, option = $target.data('modal') ? 'toggle' : $.extend({}, $target.data(), $this.data())
|
||||
|
||||
e.preventDefault()
|
||||
$target.modal(option)
|
||||
})
|
||||
})
|
||||
|
||||
}(window.jQuery);
|
||||
1838
test/fixtures/javascript/http.js
vendored
Normal file
1838
test/fixtures/javascript/http.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
9404
test/fixtures/javascript/jquery-1.7.2.js
vendored
Normal file
9404
test/fixtures/javascript/jquery-1.7.2.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1295
test/fixtures/javascript/modernizr.js
vendored
Normal file
1295
test/fixtures/javascript/modernizr.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1004
test/fixtures/objective-c/ASIHTTPRequest.h
vendored
Normal file
1004
test/fixtures/objective-c/ASIHTTPRequest.h
vendored
Normal file
File diff suppressed because it is too large
Load Diff
5125
test/fixtures/objective-c/ASIHTTPRequest.m
vendored
Normal file
5125
test/fixtures/objective-c/ASIHTTPRequest.m
vendored
Normal file
File diff suppressed because it is too large
Load Diff
251
test/fixtures/objective-c/JSONKit.h
vendored
Normal file
251
test/fixtures/objective-c/JSONKit.h
vendored
Normal file
@@ -0,0 +1,251 @@
|
||||
//
|
||||
// JSONKit.h
|
||||
// http://github.com/johnezang/JSONKit
|
||||
// Dual licensed under either the terms of the BSD License, or alternatively
|
||||
// under the terms of the Apache License, Version 2.0, as specified below.
|
||||
//
|
||||
|
||||
/*
|
||||
Copyright (c) 2011, John Engelhart
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the Zang Industries nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
/*
|
||||
Copyright 2011 John Engelhart
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdint.h>
|
||||
#include <limits.h>
|
||||
#include <TargetConditionals.h>
|
||||
#include <AvailabilityMacros.h>
|
||||
|
||||
#ifdef __OBJC__
|
||||
#import <Foundation/NSArray.h>
|
||||
#import <Foundation/NSData.h>
|
||||
#import <Foundation/NSDictionary.h>
|
||||
#import <Foundation/NSError.h>
|
||||
#import <Foundation/NSObjCRuntime.h>
|
||||
#import <Foundation/NSString.h>
|
||||
#endif // __OBJC__
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
|
||||
// For Mac OS X < 10.5.
|
||||
#ifndef NSINTEGER_DEFINED
|
||||
#define NSINTEGER_DEFINED
|
||||
#if defined(__LP64__) || defined(NS_BUILD_32_LIKE_64)
|
||||
typedef long NSInteger;
|
||||
typedef unsigned long NSUInteger;
|
||||
#define NSIntegerMin LONG_MIN
|
||||
#define NSIntegerMax LONG_MAX
|
||||
#define NSUIntegerMax ULONG_MAX
|
||||
#else // defined(__LP64__) || defined(NS_BUILD_32_LIKE_64)
|
||||
typedef int NSInteger;
|
||||
typedef unsigned int NSUInteger;
|
||||
#define NSIntegerMin INT_MIN
|
||||
#define NSIntegerMax INT_MAX
|
||||
#define NSUIntegerMax UINT_MAX
|
||||
#endif // defined(__LP64__) || defined(NS_BUILD_32_LIKE_64)
|
||||
#endif // NSINTEGER_DEFINED
|
||||
|
||||
|
||||
#ifndef _JSONKIT_H_
|
||||
#define _JSONKIT_H_
|
||||
|
||||
#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(__APPLE_CC__) && (__APPLE_CC__ >= 5465)
|
||||
#define JK_DEPRECATED_ATTRIBUTE __attribute__((deprecated))
|
||||
#else
|
||||
#define JK_DEPRECATED_ATTRIBUTE
|
||||
#endif
|
||||
|
||||
#define JSONKIT_VERSION_MAJOR 1
|
||||
#define JSONKIT_VERSION_MINOR 4
|
||||
|
||||
typedef NSUInteger JKFlags;
|
||||
|
||||
/*
|
||||
JKParseOptionComments : Allow C style // and /_* ... *_/ (without a _, obviously) comments in JSON.
|
||||
JKParseOptionUnicodeNewlines : Allow Unicode recommended (?:\r\n|[\n\v\f\r\x85\p{Zl}\p{Zp}]) newlines.
|
||||
JKParseOptionLooseUnicode : Normally the decoder will stop with an error at any malformed Unicode.
|
||||
This option allows JSON with malformed Unicode to be parsed without reporting an error.
|
||||
Any malformed Unicode is replaced with \uFFFD, or "REPLACEMENT CHARACTER".
|
||||
*/
|
||||
|
||||
enum {
|
||||
JKParseOptionNone = 0,
|
||||
JKParseOptionStrict = 0,
|
||||
JKParseOptionComments = (1 << 0),
|
||||
JKParseOptionUnicodeNewlines = (1 << 1),
|
||||
JKParseOptionLooseUnicode = (1 << 2),
|
||||
JKParseOptionPermitTextAfterValidJSON = (1 << 3),
|
||||
JKParseOptionValidFlags = (JKParseOptionComments | JKParseOptionUnicodeNewlines | JKParseOptionLooseUnicode | JKParseOptionPermitTextAfterValidJSON),
|
||||
};
|
||||
typedef JKFlags JKParseOptionFlags;
|
||||
|
||||
enum {
|
||||
JKSerializeOptionNone = 0,
|
||||
JKSerializeOptionPretty = (1 << 0),
|
||||
JKSerializeOptionEscapeUnicode = (1 << 1),
|
||||
JKSerializeOptionEscapeForwardSlashes = (1 << 4),
|
||||
JKSerializeOptionValidFlags = (JKSerializeOptionPretty | JKSerializeOptionEscapeUnicode | JKSerializeOptionEscapeForwardSlashes),
|
||||
};
|
||||
typedef JKFlags JKSerializeOptionFlags;
|
||||
|
||||
#ifdef __OBJC__
|
||||
|
||||
typedef struct JKParseState JKParseState; // Opaque internal, private type.
|
||||
|
||||
// As a general rule of thumb, if you use a method that doesn't accept a JKParseOptionFlags argument, it defaults to JKParseOptionStrict
|
||||
|
||||
@interface JSONDecoder : NSObject {
|
||||
JKParseState *parseState;
|
||||
}
|
||||
+ (id)decoder;
|
||||
+ (id)decoderWithParseOptions:(JKParseOptionFlags)parseOptionFlags;
|
||||
- (id)initWithParseOptions:(JKParseOptionFlags)parseOptionFlags;
|
||||
- (void)clearCache;
|
||||
|
||||
// The parse... methods were deprecated in v1.4 in favor of the v1.4 objectWith... methods.
|
||||
- (id)parseUTF8String:(const unsigned char *)string length:(size_t)length JK_DEPRECATED_ATTRIBUTE; // Deprecated in JSONKit v1.4. Use objectWithUTF8String:length: instead.
|
||||
- (id)parseUTF8String:(const unsigned char *)string length:(size_t)length error:(NSError **)error JK_DEPRECATED_ATTRIBUTE; // Deprecated in JSONKit v1.4. Use objectWithUTF8String:length:error: instead.
|
||||
// The NSData MUST be UTF8 encoded JSON.
|
||||
- (id)parseJSONData:(NSData *)jsonData JK_DEPRECATED_ATTRIBUTE; // Deprecated in JSONKit v1.4. Use objectWithData: instead.
|
||||
- (id)parseJSONData:(NSData *)jsonData error:(NSError **)error JK_DEPRECATED_ATTRIBUTE; // Deprecated in JSONKit v1.4. Use objectWithData:error: instead.
|
||||
|
||||
// Methods that return immutable collection objects.
|
||||
- (id)objectWithUTF8String:(const unsigned char *)string length:(NSUInteger)length;
|
||||
- (id)objectWithUTF8String:(const unsigned char *)string length:(NSUInteger)length error:(NSError **)error;
|
||||
// The NSData MUST be UTF8 encoded JSON.
|
||||
- (id)objectWithData:(NSData *)jsonData;
|
||||
- (id)objectWithData:(NSData *)jsonData error:(NSError **)error;
|
||||
|
||||
// Methods that return mutable collection objects.
|
||||
- (id)mutableObjectWithUTF8String:(const unsigned char *)string length:(NSUInteger)length;
|
||||
- (id)mutableObjectWithUTF8String:(const unsigned char *)string length:(NSUInteger)length error:(NSError **)error;
|
||||
// The NSData MUST be UTF8 encoded JSON.
|
||||
- (id)mutableObjectWithData:(NSData *)jsonData;
|
||||
- (id)mutableObjectWithData:(NSData *)jsonData error:(NSError **)error;
|
||||
|
||||
@end
|
||||
|
||||
////////////
|
||||
#pragma mark Deserializing methods
|
||||
////////////
|
||||
|
||||
@interface NSString (JSONKitDeserializing)
|
||||
- (id)objectFromJSONString;
|
||||
- (id)objectFromJSONStringWithParseOptions:(JKParseOptionFlags)parseOptionFlags;
|
||||
- (id)objectFromJSONStringWithParseOptions:(JKParseOptionFlags)parseOptionFlags error:(NSError **)error;
|
||||
- (id)mutableObjectFromJSONString;
|
||||
- (id)mutableObjectFromJSONStringWithParseOptions:(JKParseOptionFlags)parseOptionFlags;
|
||||
- (id)mutableObjectFromJSONStringWithParseOptions:(JKParseOptionFlags)parseOptionFlags error:(NSError **)error;
|
||||
@end
|
||||
|
||||
@interface NSData (JSONKitDeserializing)
|
||||
// The NSData MUST be UTF8 encoded JSON.
|
||||
- (id)objectFromJSONData;
|
||||
- (id)objectFromJSONDataWithParseOptions:(JKParseOptionFlags)parseOptionFlags;
|
||||
- (id)objectFromJSONDataWithParseOptions:(JKParseOptionFlags)parseOptionFlags error:(NSError **)error;
|
||||
- (id)mutableObjectFromJSONData;
|
||||
- (id)mutableObjectFromJSONDataWithParseOptions:(JKParseOptionFlags)parseOptionFlags;
|
||||
- (id)mutableObjectFromJSONDataWithParseOptions:(JKParseOptionFlags)parseOptionFlags error:(NSError **)error;
|
||||
@end
|
||||
|
||||
////////////
|
||||
#pragma mark Serializing methods
|
||||
////////////
|
||||
|
||||
@interface NSString (JSONKitSerializing)
|
||||
// Convenience methods for those that need to serialize the receiving NSString (i.e., instead of having to serialize a NSArray with a single NSString, you can "serialize to JSON" just the NSString).
|
||||
// Normally, a string that is serialized to JSON has quotation marks surrounding it, which you may or may not want when serializing a single string, and can be controlled with includeQuotes:
|
||||
// includeQuotes:YES `a "test"...` -> `"a \"test\"..."`
|
||||
// includeQuotes:NO `a "test"...` -> `a \"test\"...`
|
||||
- (NSData *)JSONData; // Invokes JSONDataWithOptions:JKSerializeOptionNone includeQuotes:YES
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions includeQuotes:(BOOL)includeQuotes error:(NSError **)error;
|
||||
- (NSString *)JSONString; // Invokes JSONStringWithOptions:JKSerializeOptionNone includeQuotes:YES
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions includeQuotes:(BOOL)includeQuotes error:(NSError **)error;
|
||||
@end
|
||||
|
||||
@interface NSArray (JSONKitSerializing)
|
||||
- (NSData *)JSONData;
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions error:(NSError **)error;
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingDelegate:(id)delegate selector:(SEL)selector error:(NSError **)error;
|
||||
- (NSString *)JSONString;
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions error:(NSError **)error;
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingDelegate:(id)delegate selector:(SEL)selector error:(NSError **)error;
|
||||
@end
|
||||
|
||||
@interface NSDictionary (JSONKitSerializing)
|
||||
- (NSData *)JSONData;
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions error:(NSError **)error;
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingDelegate:(id)delegate selector:(SEL)selector error:(NSError **)error;
|
||||
- (NSString *)JSONString;
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions error:(NSError **)error;
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingDelegate:(id)delegate selector:(SEL)selector error:(NSError **)error;
|
||||
@end
|
||||
|
||||
#ifdef __BLOCKS__
|
||||
|
||||
@interface NSArray (JSONKitSerializingBlockAdditions)
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingBlock:(id(^)(id object))block error:(NSError **)error;
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingBlock:(id(^)(id object))block error:(NSError **)error;
|
||||
@end
|
||||
|
||||
@interface NSDictionary (JSONKitSerializingBlockAdditions)
|
||||
- (NSData *)JSONDataWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingBlock:(id(^)(id object))block error:(NSError **)error;
|
||||
- (NSString *)JSONStringWithOptions:(JKSerializeOptionFlags)serializeOptions serializeUnsupportedClassesUsingBlock:(id(^)(id object))block error:(NSError **)error;
|
||||
@end
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
#endif // __OBJC__
|
||||
|
||||
#endif // _JSONKIT_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // extern "C"
|
||||
#endif
|
||||
3059
test/fixtures/objective-c/JSONKit.m
vendored
Normal file
3059
test/fixtures/objective-c/JSONKit.m
vendored
Normal file
File diff suppressed because it is too large
Load Diff
21
test/fixtures/objective-c/MainMenuViewController.h
vendored
Normal file
21
test/fixtures/objective-c/MainMenuViewController.h
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
//
|
||||
// Copyright 2009-2011 Facebook
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
@interface MainMenuViewController : TTTableViewController {
|
||||
|
||||
}
|
||||
|
||||
@end
|
||||
157
test/fixtures/objective-c/MainMenuViewController.m
vendored
Normal file
157
test/fixtures/objective-c/MainMenuViewController.m
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
//
|
||||
// Copyright 2009-2011 Facebook
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#import "MainMenuViewController.h"
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
@implementation MainMenuViewController
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {
|
||||
if (self = [super initWithNibName:nil bundle:nil]) {
|
||||
self.title = @"Style Catalog";
|
||||
//self.variableHeightRows = YES;
|
||||
self.tableViewStyle = UITableViewStyleGrouped;
|
||||
|
||||
self.dataSource =
|
||||
[TTSectionedDataSource dataSourceWithObjects:
|
||||
@"Text Styles",
|
||||
[TTTableTextItem itemWithText:@"Link Text"
|
||||
URL:@"tt://styles/linkText:/text"],
|
||||
[TTTableTextItem itemWithText:@"Mini Badge"
|
||||
URL:@"tt://styles/miniBadge/text"],
|
||||
[TTTableTextItem itemWithText:@"Badge"
|
||||
URL:@"tt://styles/badge/text"],
|
||||
[TTTableTextItem itemWithText:@"Large Badge"
|
||||
URL:@"tt://styles/largeBadge/text"],
|
||||
|
||||
@"Views",
|
||||
[TTTableTextItem itemWithText:@"Post Text Editor"
|
||||
URL:@"tt://styles/postTextEditor/view"],
|
||||
[TTTableTextItem itemWithText:@"Photo Caption"
|
||||
URL:@"tt://styles/photoCaption/view"],
|
||||
[TTTableTextItem itemWithText:@"Photo Status Label"
|
||||
URL:@"tt://styles/photoStatusLabel/view"],
|
||||
[TTTableTextItem itemWithText:@"Page Dot"
|
||||
URL:@"tt://styles/pageDot:/view"],
|
||||
[TTTableTextItem itemWithText:@"Highlighted Link"
|
||||
URL:@"tt://styles/linkHighlighted/view"],
|
||||
[TTTableTextItem itemWithText:@"Table Header"
|
||||
URL:@"tt://styles/tableHeader/view"],
|
||||
[TTTableTextItem itemWithText:@"Picker Cell"
|
||||
URL:@"tt://styles/pickerCell:/view"],
|
||||
[TTTableTextItem itemWithText:@"Search Table Shadow"
|
||||
URL:@"tt://styles/searchTableShadow/view"],
|
||||
[TTTableTextItem itemWithText:@"Black Bezel"
|
||||
URL:@"tt://styles/blackBezel/view"],
|
||||
[TTTableTextItem itemWithText:@"White Bezel"
|
||||
URL:@"tt://styles/whiteBezel/view"],
|
||||
[TTTableTextItem itemWithText:@"Black Banner"
|
||||
URL:@"tt://styles/blackBanner/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Bar"
|
||||
URL:@"tt://styles/tabBar/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Strip"
|
||||
URL:@"tt://styles/tabStrip/view"],
|
||||
|
||||
@"Tab Grid",
|
||||
[TTTableTextItem itemWithText:@"Tab Grid"
|
||||
URL:@"tt://styles/tabGrid/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Top Left"
|
||||
URL:@"tt://styles/tabGridTabTopLeft:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Top Right"
|
||||
URL:@"tt://styles/tabGridTabTopRight:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Bottom Right"
|
||||
URL:@"tt://styles/tabGridTabBottomRight:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Bottom Left"
|
||||
URL:@"tt://styles/tabGridTabBottomLeft:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Left"
|
||||
URL:@"tt://styles/tabGridTabLeft:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Right"
|
||||
URL:@"tt://styles/tabGridTabRight:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Grid Center"
|
||||
URL:@"tt://styles/tabGridTabCenter:/view"],
|
||||
|
||||
@"Tabs",
|
||||
[TTTableTextItem itemWithText:@"Tab"
|
||||
URL:@"tt://styles/tab:/view"],
|
||||
[TTTableTextItem itemWithText:@"Round Tab"
|
||||
URL:@"tt://styles/tabRound:/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Left Overflow"
|
||||
URL:@"tt://styles/tabOverflowLeft/view"],
|
||||
[TTTableTextItem itemWithText:@"Tab Right Overflow"
|
||||
URL:@"tt://styles/tabOverflowRight/view"],
|
||||
|
||||
@"Images",
|
||||
[TTTableTextItem itemWithText:@"Thumb View"
|
||||
URL:@"tt://styles/thumbView:/image"],
|
||||
|
||||
@"Launcher",
|
||||
[TTTableTextItem itemWithText:@"Launcher Button"
|
||||
URL:@"tt://styles/launcherButton:/image"],
|
||||
[TTTableTextItem itemWithText:@"Launcher Close Button"
|
||||
URL:@"tt://styles/launcherCloseButton:/view"],
|
||||
|
||||
@"Text Bar",
|
||||
[TTTableTextItem itemWithText:@"Text Bar"
|
||||
URL:@"tt://styles/textBar/view"],
|
||||
[TTTableTextItem itemWithText:@"Text Bar Footer"
|
||||
URL:@"tt://styles/textBarFooter/view"],
|
||||
[TTTableTextItem itemWithText:@"Text Bar Text Field"
|
||||
URL:@"tt://styles/textBarTextField/view"],
|
||||
[TTTableTextItem itemWithText:@"Text Bar Post Button"
|
||||
URL:@"tt://styles/textBarPostButton:/text"],
|
||||
|
||||
@"Toolbars",
|
||||
[TTTableTextItem itemWithText:@"Toolbar Button"
|
||||
URL:@"tt://styles/toolbarButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Toolbar Back Button"
|
||||
URL:@"tt://styles/toolbarBackButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Toolbar Forward Button"
|
||||
URL:@"tt://styles/toolbarForwardButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Toolbar Round Button"
|
||||
URL:@"tt://styles/toolbarRoundButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Black Toolbar Button"
|
||||
URL:@"tt://styles/blackToolbarButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Gray Toolbar Button"
|
||||
URL:@"tt://styles/grayToolbarButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Black Toolbar Forward Button"
|
||||
URL:@"tt://styles/blackToolbarForwardButton:/view"],
|
||||
[TTTableTextItem itemWithText:@"Black Toolbar Round Button"
|
||||
URL:@"tt://styles/blackToolbarRoundButton:/view"],
|
||||
|
||||
@"Search",
|
||||
[TTTableTextItem itemWithText:@"Search Text Field"
|
||||
URL:@"tt://styles/searchTextField/view"],
|
||||
[TTTableTextItem itemWithText:@"Search Bar"
|
||||
URL:@"tt://styles/searchBar/view"],
|
||||
[TTTableTextItem itemWithText:@"Search Bar Bottom"
|
||||
URL:@"tt://styles/searchBarBottom/view"],
|
||||
[TTTableTextItem itemWithText:@"Black Search Bar"
|
||||
URL:@"tt://styles/blackSearchBar/view"],
|
||||
|
||||
nil];
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
|
||||
@end
|
||||
|
||||
22
test/fixtures/objective-c/PlaygroundViewController.h
vendored
Normal file
22
test/fixtures/objective-c/PlaygroundViewController.h
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
//
|
||||
// Copyright 2009-2011 Facebook
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
|
||||
@interface PlaygroundViewController : UIViewController {
|
||||
UIScrollView* _scrollView;
|
||||
}
|
||||
|
||||
@end
|
||||
200
test/fixtures/objective-c/PlaygroundViewController.m
vendored
Normal file
200
test/fixtures/objective-c/PlaygroundViewController.m
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
//
|
||||
// Copyright 2009-2011 Facebook
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#import "PlaygroundViewController.h"
|
||||
|
||||
#import <Three20Core/NSDataAdditions.h>
|
||||
|
||||
static const CGFloat kFramePadding = 10;
|
||||
static const CGFloat kElementSpacing = 5;
|
||||
static const CGFloat kGroupSpacing = 10;
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
@implementation PlaygroundViewController
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (CGFloat) addHeader:(NSString*)text yOffset:(CGFloat)yOffset {
|
||||
UILabel* label = [[UILabel alloc] initWithFrame:CGRectZero];
|
||||
label.text = text;
|
||||
label.font = [UIFont systemFontOfSize:20];
|
||||
label.numberOfLines = 0;
|
||||
|
||||
CGRect frame = label.frame;
|
||||
frame.origin.x = kFramePadding;
|
||||
frame.origin.y = yOffset;
|
||||
frame.size.width = 320 - kFramePadding * 2;
|
||||
frame.size.height = [text sizeWithFont:label.font
|
||||
constrainedToSize:CGSizeMake(frame.size.width, 10000)].height;
|
||||
label.frame = frame;
|
||||
|
||||
[_scrollView addSubview:label];
|
||||
|
||||
yOffset += label.frame.size.height + kElementSpacing;
|
||||
|
||||
TT_RELEASE_SAFELY(label);
|
||||
|
||||
return yOffset;
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (CGFloat) addText:(NSString*)text yOffset:(CGFloat)yOffset {
|
||||
UILabel* label = [[UILabel alloc] initWithFrame:CGRectZero];
|
||||
label.text = text;
|
||||
label.numberOfLines = 0;
|
||||
|
||||
CGRect frame = label.frame;
|
||||
frame.origin.x = kFramePadding;
|
||||
frame.origin.y = yOffset;
|
||||
frame.size.width = 320 - kFramePadding * 2;
|
||||
frame.size.height = [text sizeWithFont:label.font
|
||||
constrainedToSize:CGSizeMake(frame.size.width, 10000)].height;
|
||||
label.frame = frame;
|
||||
|
||||
[_scrollView addSubview:label];
|
||||
|
||||
yOffset += label.frame.size.height + kElementSpacing;
|
||||
|
||||
TT_RELEASE_SAFELY(label);
|
||||
|
||||
return yOffset;
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void) loadView {
|
||||
[super loadView];
|
||||
|
||||
_scrollView = [[UIScrollView alloc] initWithFrame:self.view.bounds];
|
||||
_scrollView.autoresizingMask =
|
||||
UIViewAutoresizingFlexibleWidth
|
||||
| UIViewAutoresizingFlexibleHeight;
|
||||
|
||||
[self.view addSubview:_scrollView];
|
||||
|
||||
CGFloat yOffset = kFramePadding;
|
||||
|
||||
yOffset = [self addHeader:NSLocalizedString(@"TTDebug", @"") yOffset:yOffset];
|
||||
|
||||
{
|
||||
UIButton* button = [UIButton buttonWithType:UIButtonTypeRoundedRect];
|
||||
[button setTitle:NSLocalizedString(@"Debug test", @"") forState:UIControlStateNormal];
|
||||
[button addTarget: self
|
||||
action: @selector(debugTestAction)
|
||||
forControlEvents: UIControlEventTouchUpInside];
|
||||
[button sizeToFit];
|
||||
|
||||
CGRect frame = button.frame;
|
||||
frame.origin.x = kFramePadding;
|
||||
frame.origin.y = yOffset;
|
||||
button.frame = frame;
|
||||
|
||||
[_scrollView addSubview:button];
|
||||
|
||||
yOffset += frame.size.height;
|
||||
}
|
||||
|
||||
yOffset += kGroupSpacing;
|
||||
|
||||
yOffset = [self addHeader:NSLocalizedString(@"TTGlobalCoreLocale", @"") yOffset:yOffset];
|
||||
yOffset = [self addText:[NSString stringWithFormat:NSLocalizedString(@"Current locale: %@", @""),
|
||||
[TTCurrentLocale()
|
||||
displayNameForKey:NSLocaleIdentifier
|
||||
value:[TTCurrentLocale() localeIdentifier]]]
|
||||
yOffset:yOffset];
|
||||
yOffset += kGroupSpacing;
|
||||
|
||||
yOffset = [self addHeader:NSLocalizedString(@"TTGlobalCorePaths", @"") yOffset:yOffset];
|
||||
yOffset = [self addText:[NSString stringWithFormat:NSLocalizedString(@"Bundle path: %@", @""),
|
||||
TTPathForBundleResource(@"Icon.png")]
|
||||
yOffset:yOffset];
|
||||
yOffset = [self addText:[NSString stringWithFormat:NSLocalizedString(@"Document path: %@", @""),
|
||||
TTPathForDocumentsResource(@"document.pdf")]
|
||||
yOffset:yOffset];
|
||||
yOffset += kGroupSpacing;
|
||||
|
||||
yOffset = [self addHeader:NSLocalizedString(@"NSDataAdditions", @"") yOffset:yOffset];
|
||||
yOffset = [self addText:[NSString stringWithFormat:NSLocalizedString(@"MD5 Hash of \"Three20\": %@", @""),
|
||||
[[@"Three20" dataUsingEncoding:NSUTF8StringEncoding] md5Hash]]
|
||||
yOffset:yOffset];
|
||||
yOffset += kGroupSpacing;
|
||||
|
||||
[_scrollView setContentSize:CGSizeMake(320, yOffset)];
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void) viewDidUnload {
|
||||
[super viewDidUnload];
|
||||
|
||||
TT_RELEASE_SAFELY(_scrollView);
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void) viewDidAppear:(BOOL)animated {
|
||||
[super viewDidAppear:animated];
|
||||
|
||||
[_scrollView flashScrollIndicators];
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void) debugTestAction {
|
||||
#ifdef DEBUG
|
||||
NSLog(@"Three20 debug logging is currently...ON");
|
||||
#else
|
||||
NSLog(@"Three20 debug logging is currently...OFF");
|
||||
#endif
|
||||
|
||||
// This will print the current method name.
|
||||
TTDPRINTMETHODNAME();
|
||||
|
||||
TTDPRINT(@"Showing TTDPRINT.");
|
||||
TTDPRINT(@"-----------------");
|
||||
TTDPRINT(@"Showing TTD log levels <= %d", TTMAXLOGLEVEL);
|
||||
TTDERROR(@"This is TTDERROR, level %d.", TTLOGLEVEL_ERROR);
|
||||
TTDWARNING(@"This is TTDWARNING, level %d.", TTLOGLEVEL_WARNING);
|
||||
TTDINFO(@"This is TTDINFO, level %d.", TTLOGLEVEL_INFO);
|
||||
|
||||
TTDPRINT(@"");
|
||||
TTDPRINT(@"Showing TTDCONDITIONLOG.");
|
||||
TTDPRINT(@"------------------------");
|
||||
TTDCONDITIONLOG(true, @"This will always display because the condition is \"true\"");
|
||||
TTDCONDITIONLOG(false, @"This will never display because the condition is \"false\"");
|
||||
TTDCONDITIONLOG(rand()%2, @"This will randomly display because the condition is \"rand()%2\"");
|
||||
|
||||
TTDPRINT(@"");
|
||||
TTDPRINT(@"Showing TTDASSERT.");
|
||||
TTDPRINT(@"------------------");
|
||||
// Should do nothing at all.
|
||||
TTDASSERT(true);
|
||||
|
||||
// This will jump you into the debugger in the simulator.
|
||||
// Note that this isn't a crash! Simply the equivalent of setting
|
||||
// a breakpoint in the debugger, but programmatically. These TTDASSERTs
|
||||
// will be completely stripped away from your final product, assuming
|
||||
// you don't declare the DEBUG preprocessor macro (and you shouldn't in
|
||||
// your final product).
|
||||
TTDASSERT(false);
|
||||
}
|
||||
|
||||
|
||||
@end
|
||||
101
test/fixtures/objective-c/SBJsonParser.h
vendored
Normal file
101
test/fixtures/objective-c/SBJsonParser.h
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
/*
|
||||
Copyright (C) 2009 Stig Brautaset. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the author nor the names of its contributors may be used
|
||||
to endorse or promote products derived from this software without specific
|
||||
prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
/**
|
||||
@brief Parse JSON Strings and NSData objects
|
||||
|
||||
This uses SBJsonStreamParser internally.
|
||||
|
||||
@see @ref objc2json
|
||||
|
||||
*/
|
||||
|
||||
@interface SBJsonParser : NSObject
|
||||
|
||||
/**
|
||||
@brief The maximum recursing depth.
|
||||
|
||||
Defaults to 32. If the input is nested deeper than this the input will be deemed to be
|
||||
malicious and the parser returns nil, signalling an error. ("Nested too deep".) You can
|
||||
turn off this security feature by setting the maxDepth value to 0.
|
||||
*/
|
||||
@property NSUInteger maxDepth;
|
||||
|
||||
/**
|
||||
@brief Description of parse error
|
||||
|
||||
This method returns the trace of the last method that failed.
|
||||
You need to check the return value of the call you're making to figure out
|
||||
if the call actually failed, before you know call this method.
|
||||
|
||||
@return A string describing the error encountered, or nil if no error occured.
|
||||
|
||||
*/
|
||||
@property(copy) NSString *error;
|
||||
|
||||
/**
|
||||
@brief Return the object represented by the given NSData object.
|
||||
|
||||
The data *must* be UTF8 encoded.
|
||||
|
||||
@param data An NSData containing UTF8 encoded data to parse.
|
||||
@return The NSArray or NSDictionary represented by the object, or nil if an error occured.
|
||||
|
||||
*/
|
||||
- (id)objectWithData:(NSData*)data;
|
||||
|
||||
/**
|
||||
@brief Return the object represented by the given string
|
||||
|
||||
This method converts its input to an NSData object containing UTF8 and calls -objectWithData: with it.
|
||||
|
||||
@return The NSArray or NSDictionary represented by the object, or nil if an error occured.
|
||||
*/
|
||||
- (id)objectWithString:(NSString *)repr;
|
||||
|
||||
/**
|
||||
@brief Return the object represented by the given string
|
||||
|
||||
This method calls objectWithString: internally. If an error occurs, and if @p error
|
||||
is not nil, it creates an NSError object and returns this through its second argument.
|
||||
|
||||
@param jsonText the json string to parse
|
||||
@param error pointer to an NSError object to populate on error
|
||||
|
||||
@return The NSArray or NSDictionary represented by the object, or nil if an error occured.
|
||||
*/
|
||||
|
||||
- (id)objectWithString:(NSString*)jsonText
|
||||
error:(NSError**)error;
|
||||
|
||||
@end
|
||||
|
||||
|
||||
100
test/fixtures/objective-c/SBJsonParser.m
vendored
Normal file
100
test/fixtures/objective-c/SBJsonParser.m
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
/*
|
||||
Copyright (C) 2009,2010 Stig Brautaset. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the author nor the names of its contributors may be used
|
||||
to endorse or promote products derived from this software without specific
|
||||
prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#import "SBJsonParser.h"
|
||||
#import "SBJsonStreamParser.h"
|
||||
#import "SBJsonStreamParserAdapter.h"
|
||||
#import "SBJsonStreamParserAccumulator.h"
|
||||
|
||||
@implementation SBJsonParser
|
||||
|
||||
@synthesize maxDepth;
|
||||
@synthesize error;
|
||||
|
||||
- (id)init {
|
||||
self = [super init];
|
||||
if (self)
|
||||
self.maxDepth = 32u;
|
||||
return self;
|
||||
}
|
||||
|
||||
|
||||
#pragma mark Methods
|
||||
|
||||
- (id)objectWithData:(NSData *)data {
|
||||
|
||||
if (!data) {
|
||||
self.error = @"Input was 'nil'";
|
||||
return nil;
|
||||
}
|
||||
|
||||
SBJsonStreamParserAccumulator *accumulator = [[SBJsonStreamParserAccumulator alloc] init];
|
||||
|
||||
SBJsonStreamParserAdapter *adapter = [[SBJsonStreamParserAdapter alloc] init];
|
||||
adapter.delegate = accumulator;
|
||||
|
||||
SBJsonStreamParser *parser = [[SBJsonStreamParser alloc] init];
|
||||
parser.maxDepth = self.maxDepth;
|
||||
parser.delegate = adapter;
|
||||
|
||||
switch ([parser parse:data]) {
|
||||
case SBJsonStreamParserComplete:
|
||||
return accumulator.value;
|
||||
break;
|
||||
|
||||
case SBJsonStreamParserWaitingForData:
|
||||
self.error = @"Unexpected end of input";
|
||||
break;
|
||||
|
||||
case SBJsonStreamParserError:
|
||||
self.error = parser.error;
|
||||
break;
|
||||
}
|
||||
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (id)objectWithString:(NSString *)repr {
|
||||
return [self objectWithData:[repr dataUsingEncoding:NSUTF8StringEncoding]];
|
||||
}
|
||||
|
||||
- (id)objectWithString:(NSString*)repr error:(NSError**)error_ {
|
||||
id tmp = [self objectWithString:repr];
|
||||
if (tmp)
|
||||
return tmp;
|
||||
|
||||
if (error_) {
|
||||
NSDictionary *ui = [NSDictionary dictionaryWithObjectsAndKeys:error, NSLocalizedDescriptionKey, nil];
|
||||
*error_ = [NSError errorWithDomain:@"org.brautaset.SBJsonParser.ErrorDomain" code:0 userInfo:ui];
|
||||
}
|
||||
|
||||
return nil;
|
||||
}
|
||||
|
||||
@end
|
||||
26
test/fixtures/objective-c/StyleViewController.h
vendored
Normal file
26
test/fixtures/objective-c/StyleViewController.h
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
//
|
||||
// Copyright 2009-2011 Facebook
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
@interface StyleViewController : TTViewController {
|
||||
@private
|
||||
TTStyle* _style;
|
||||
TTStyle* _styleHighlight;
|
||||
TTStyle* _styleDisabled;
|
||||
TTStyle* _styleSelected;
|
||||
NSString* _styleType;
|
||||
}
|
||||
|
||||
@end
|
||||
185
test/fixtures/objective-c/StyleViewController.m
vendored
Normal file
185
test/fixtures/objective-c/StyleViewController.m
vendored
Normal file
@@ -0,0 +1,185 @@
|
||||
//
|
||||
// Copyright 2009-2011 Facebook
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#import "StyleViewController.h"
|
||||
|
||||
#import "StyleView.h"
|
||||
|
||||
NSString* kTextStyleType = @"text";
|
||||
NSString* kViewStyleType = @"view";
|
||||
NSString* kImageStyleType = @"image";
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
@implementation StyleViewController
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (id)initWithStyleName:(NSString*)name styleType:(NSString*)styleType {
|
||||
if (self = [super initWithNibName:nil bundle:nil]) {
|
||||
self.title = name;
|
||||
|
||||
_style = [[[TTStyleSheet globalStyleSheet] styleWithSelector:name] retain];
|
||||
_styleHighlight = [[[TTStyleSheet globalStyleSheet]
|
||||
styleWithSelector: name
|
||||
forState: UIControlStateHighlighted] retain];
|
||||
_styleDisabled = [[[TTStyleSheet globalStyleSheet]
|
||||
styleWithSelector: name
|
||||
forState: UIControlStateDisabled] retain];
|
||||
_styleSelected = [[[TTStyleSheet globalStyleSheet]
|
||||
styleWithSelector: name
|
||||
forState: UIControlStateSelected] retain];
|
||||
|
||||
_styleType = [styleType copy];
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void)dealloc {
|
||||
TT_RELEASE_SAFELY(_style);
|
||||
TT_RELEASE_SAFELY(_styleHighlight);
|
||||
TT_RELEASE_SAFELY(_styleDisabled);
|
||||
TT_RELEASE_SAFELY(_styleSelected);
|
||||
TT_RELEASE_SAFELY(_styleType);
|
||||
|
||||
[super dealloc];
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
#pragma mark -
|
||||
#pragma mark UIViewController
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void)addTextView:(NSString*)title frame:(CGRect)frame style:(TTStyle*)style {
|
||||
CGRect textFrame = TTRectInset(frame, UIEdgeInsetsMake(20, 20, 20, 20));
|
||||
StyleView* text = [[StyleView alloc]
|
||||
initWithFrame:textFrame];
|
||||
text.text = title;
|
||||
TTStyleContext* context = [[TTStyleContext alloc] init];
|
||||
context.frame = frame;
|
||||
context.delegate = text;
|
||||
context.font = [UIFont systemFontOfSize:[UIFont systemFontSize]];
|
||||
CGSize size = [style addToSize:CGSizeZero context:context];
|
||||
TT_RELEASE_SAFELY(context);
|
||||
|
||||
size.width += 20;
|
||||
size.height += 20;
|
||||
textFrame.size = size;
|
||||
text.frame = textFrame;
|
||||
|
||||
text.style = style;
|
||||
text.backgroundColor = [UIColor colorWithRed:0.9 green:0.9 blue:1 alpha:1];
|
||||
text.autoresizingMask =
|
||||
UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleBottomMargin;
|
||||
|
||||
[self.view addSubview:text];
|
||||
TT_RELEASE_SAFELY(text);
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void)addView:(CGRect)frame style:(TTStyle*)style {
|
||||
CGRect viewFrame = TTRectInset(frame, UIEdgeInsetsMake(20, 20, 20, 20));
|
||||
StyleView* view = [[StyleView alloc]
|
||||
initWithFrame:viewFrame];
|
||||
|
||||
view.style = style;
|
||||
view.backgroundColor = [UIColor colorWithRed:0.9 green:0.9 blue:1 alpha:1];
|
||||
view.autoresizingMask =
|
||||
UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleBottomMargin;
|
||||
|
||||
[self.view addSubview:view];
|
||||
TT_RELEASE_SAFELY(view);
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void)addImageView:(CGRect)frame style:(TTStyle*)style {
|
||||
CGRect viewFrame = TTRectInset(frame, UIEdgeInsetsMake(20, 20, 20, 20));
|
||||
TTImageView* view = [[TTImageView alloc]
|
||||
initWithFrame:viewFrame];
|
||||
|
||||
view.urlPath = @"bundle://Icon.png";
|
||||
view.style = style;
|
||||
view.backgroundColor = [UIColor colorWithRed:0.9 green:0.9 blue:1 alpha:1];
|
||||
view.autoresizingMask =
|
||||
UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleBottomMargin;
|
||||
CGRect imageFrame = view.frame;
|
||||
imageFrame.size = view.image.size;
|
||||
view.frame = imageFrame;
|
||||
|
||||
[self.view addSubview:view];
|
||||
TT_RELEASE_SAFELY(view);
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
- (void)loadView {
|
||||
[super loadView];
|
||||
|
||||
CGRect frame = self.view.bounds;
|
||||
frame.size.height /= 4;
|
||||
|
||||
if ([_styleType isEqualToString:kTextStyleType]) {
|
||||
[self addTextView:@"UIControlStateNormal" frame:frame style:_style];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addTextView:@"UIControlStateHighlighted" frame:frame style:_styleHighlight];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addTextView:@"UIControlStateDisabled" frame:frame style:_styleDisabled];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addTextView:@"UIControlStateSelected" frame:frame style:_styleSelected];
|
||||
|
||||
} else if ([_styleType isEqualToString:kViewStyleType]) {
|
||||
[self addView:frame style:_style];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addView:frame style:_styleHighlight];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addView:frame style:_styleDisabled];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addView:frame style:_styleSelected];
|
||||
|
||||
} else if ([_styleType isEqualToString:kImageStyleType]) {
|
||||
[self addImageView:frame style:_style];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addImageView:frame style:_styleHighlight];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addImageView:frame style:_styleDisabled];
|
||||
|
||||
frame.origin.y += frame.size.height;
|
||||
[self addImageView:frame style:_styleSelected];
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@end
|
||||
|
||||
217
test/fixtures/objective-c/TUITableView.h
vendored
Normal file
217
test/fixtures/objective-c/TUITableView.h
vendored
Normal file
@@ -0,0 +1,217 @@
|
||||
/*
|
||||
Copyright 2011 Twitter, Inc.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this work except in compliance with the License.
|
||||
You may obtain a copy of the License in the LICENSE file, or at:
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
#import "TUIScrollView.h"
|
||||
#import "TUIFastIndexPath.h"
|
||||
|
||||
typedef enum {
|
||||
TUITableViewStylePlain, // regular table view
|
||||
TUITableViewStyleGrouped, // grouped table view—headers stick to the top of the table view and scroll with it
|
||||
} TUITableViewStyle;
|
||||
|
||||
typedef enum {
|
||||
TUITableViewScrollPositionNone,
|
||||
TUITableViewScrollPositionTop,
|
||||
TUITableViewScrollPositionMiddle,
|
||||
TUITableViewScrollPositionBottom,
|
||||
TUITableViewScrollPositionToVisible, // currently the only supported arg
|
||||
} TUITableViewScrollPosition;
|
||||
|
||||
typedef enum {
|
||||
TUITableViewInsertionMethodBeforeIndex = NSOrderedAscending,
|
||||
TUITableViewInsertionMethodAtIndex = NSOrderedSame,
|
||||
TUITableViewInsertionMethodAfterIndex = NSOrderedDescending
|
||||
} TUITableViewInsertionMethod;
|
||||
|
||||
@class TUITableViewCell;
|
||||
@protocol TUITableViewDataSource;
|
||||
|
||||
@class TUITableView;
|
||||
|
||||
@protocol TUITableViewDelegate<NSObject, TUIScrollViewDelegate>
|
||||
|
||||
- (CGFloat)tableView:(TUITableView *)tableView heightForRowAtIndexPath:(TUIFastIndexPath *)indexPath;
|
||||
|
||||
@optional
|
||||
|
||||
- (void)tableView:(TUITableView *)tableView willDisplayCell:(TUITableViewCell *)cell forRowAtIndexPath:(TUIFastIndexPath *)indexPath; // called after the cell's frame has been set but before it's added as a subview
|
||||
- (void)tableView:(TUITableView *)tableView didSelectRowAtIndexPath:(TUIFastIndexPath *)indexPath; // happens on left/right mouse down, key up/down
|
||||
- (void)tableView:(TUITableView *)tableView didDeselectRowAtIndexPath:(TUIFastIndexPath *)indexPath;
|
||||
- (void)tableView:(TUITableView *)tableView didClickRowAtIndexPath:(TUIFastIndexPath *)indexPath withEvent:(NSEvent *)event; // happens on left/right mouse up (can look at clickCount)
|
||||
|
||||
- (BOOL)tableView:(TUITableView*)tableView shouldSelectRowAtIndexPath:(TUIFastIndexPath*)indexPath forEvent:(NSEvent*)event; // YES, if not implemented
|
||||
- (NSMenu *)tableView:(TUITableView *)tableView menuForRowAtIndexPath:(TUIFastIndexPath *)indexPath withEvent:(NSEvent *)event;
|
||||
|
||||
// the following are good places to update or restore state (such as selection) when the table data reloads
|
||||
- (void)tableViewWillReloadData:(TUITableView *)tableView;
|
||||
- (void)tableViewDidReloadData:(TUITableView *)tableView;
|
||||
|
||||
// the following is optional for row reordering
|
||||
- (TUIFastIndexPath *)tableView:(TUITableView *)tableView targetIndexPathForMoveFromRowAtIndexPath:(TUIFastIndexPath *)fromPath toProposedIndexPath:(TUIFastIndexPath *)proposedPath;
|
||||
|
||||
@end
|
||||
|
||||
@interface TUITableView : TUIScrollView
|
||||
{
|
||||
TUITableViewStyle _style;
|
||||
__unsafe_unretained id <TUITableViewDataSource> _dataSource; // weak
|
||||
NSArray * _sectionInfo;
|
||||
|
||||
TUIView * _pullDownView;
|
||||
TUIView * _headerView;
|
||||
|
||||
CGSize _lastSize;
|
||||
CGFloat _contentHeight;
|
||||
|
||||
NSMutableIndexSet * _visibleSectionHeaders;
|
||||
NSMutableDictionary * _visibleItems;
|
||||
NSMutableDictionary * _reusableTableCells;
|
||||
|
||||
TUIFastIndexPath * _selectedIndexPath;
|
||||
TUIFastIndexPath * _indexPathShouldBeFirstResponder;
|
||||
NSInteger _futureMakeFirstResponderToken;
|
||||
TUIFastIndexPath * _keepVisibleIndexPathForReload;
|
||||
CGFloat _relativeOffsetForReload;
|
||||
|
||||
// drag-to-reorder state
|
||||
TUITableViewCell * _dragToReorderCell;
|
||||
CGPoint _currentDragToReorderLocation;
|
||||
CGPoint _currentDragToReorderMouseOffset;
|
||||
TUIFastIndexPath * _currentDragToReorderIndexPath;
|
||||
TUITableViewInsertionMethod _currentDragToReorderInsertionMethod;
|
||||
TUIFastIndexPath * _previousDragToReorderIndexPath;
|
||||
TUITableViewInsertionMethod _previousDragToReorderInsertionMethod;
|
||||
|
||||
struct {
|
||||
unsigned int animateSelectionChanges:1;
|
||||
unsigned int forceSaveScrollPosition:1;
|
||||
unsigned int derepeaterEnabled:1;
|
||||
unsigned int layoutSubviewsReentrancyGuard:1;
|
||||
unsigned int didFirstLayout:1;
|
||||
unsigned int dataSourceNumberOfSectionsInTableView:1;
|
||||
unsigned int delegateTableViewWillDisplayCellForRowAtIndexPath:1;
|
||||
unsigned int maintainContentOffsetAfterReload:1;
|
||||
} _tableFlags;
|
||||
|
||||
}
|
||||
|
||||
- (id)initWithFrame:(CGRect)frame style:(TUITableViewStyle)style; // must specify style at creation. -initWithFrame: calls this with UITableViewStylePlain
|
||||
|
||||
@property (nonatomic,unsafe_unretained) id <TUITableViewDataSource> dataSource;
|
||||
@property (nonatomic,unsafe_unretained) id <TUITableViewDelegate> delegate;
|
||||
|
||||
@property (readwrite, assign) BOOL animateSelectionChanges;
|
||||
@property (nonatomic, assign) BOOL maintainContentOffsetAfterReload;
|
||||
|
||||
- (void)reloadData;
|
||||
|
||||
/**
|
||||
The table view itself has mechanisms for maintaining scroll position. During a live resize the table view should automatically "do the right thing". This method may be useful during a reload if you want to stay in the same spot. Use it instead of -reloadData.
|
||||
*/
|
||||
- (void)reloadDataMaintainingVisibleIndexPath:(TUIFastIndexPath *)indexPath relativeOffset:(CGFloat)relativeOffset;
|
||||
|
||||
// Forces a re-calculation and re-layout of the table. This is most useful for animating the relayout. It is potentially _more_ expensive than -reloadData since it has to allow for animating.
|
||||
- (void)reloadLayout;
|
||||
|
||||
- (NSInteger)numberOfSections;
|
||||
- (NSInteger)numberOfRowsInSection:(NSInteger)section;
|
||||
|
||||
- (CGRect)rectForHeaderOfSection:(NSInteger)section;
|
||||
- (CGRect)rectForSection:(NSInteger)section;
|
||||
- (CGRect)rectForRowAtIndexPath:(TUIFastIndexPath *)indexPath;
|
||||
|
||||
- (NSIndexSet *)indexesOfSectionsInRect:(CGRect)rect;
|
||||
- (NSIndexSet *)indexesOfSectionHeadersInRect:(CGRect)rect;
|
||||
- (TUIFastIndexPath *)indexPathForCell:(TUITableViewCell *)cell; // returns nil if cell is not visible
|
||||
- (NSArray *)indexPathsForRowsInRect:(CGRect)rect; // returns nil if rect not valid
|
||||
- (TUIFastIndexPath *)indexPathForRowAtPoint:(CGPoint)point;
|
||||
- (TUIFastIndexPath *)indexPathForRowAtVerticalOffset:(CGFloat)offset;
|
||||
- (NSInteger)indexOfSectionWithHeaderAtPoint:(CGPoint)point;
|
||||
- (NSInteger)indexOfSectionWithHeaderAtVerticalOffset:(CGFloat)offset;
|
||||
|
||||
- (void)enumerateIndexPathsUsingBlock:(void (^)(TUIFastIndexPath *indexPath, BOOL *stop))block;
|
||||
- (void)enumerateIndexPathsWithOptions:(NSEnumerationOptions)options usingBlock:(void (^)(TUIFastIndexPath *indexPath, BOOL *stop))block;
|
||||
- (void)enumerateIndexPathsFromIndexPath:(TUIFastIndexPath *)fromIndexPath toIndexPath:(TUIFastIndexPath *)toIndexPath withOptions:(NSEnumerationOptions)options usingBlock:(void (^)(TUIFastIndexPath *indexPath, BOOL *stop))block;
|
||||
|
||||
- (TUIView *)headerViewForSection:(NSInteger)section;
|
||||
- (TUITableViewCell *)cellForRowAtIndexPath:(TUIFastIndexPath *)indexPath; // returns nil if cell is not visible or index path is out of range
|
||||
- (NSArray *)visibleCells; // no particular order
|
||||
- (NSArray *)sortedVisibleCells; // top to bottom
|
||||
- (NSArray *)indexPathsForVisibleRows;
|
||||
|
||||
- (void)scrollToRowAtIndexPath:(TUIFastIndexPath *)indexPath atScrollPosition:(TUITableViewScrollPosition)scrollPosition animated:(BOOL)animated;
|
||||
|
||||
- (TUIFastIndexPath *)indexPathForSelectedRow; // return nil or index path representing section and row of selection.
|
||||
- (TUIFastIndexPath *)indexPathForFirstRow;
|
||||
- (TUIFastIndexPath *)indexPathForLastRow;
|
||||
|
||||
- (void)selectRowAtIndexPath:(TUIFastIndexPath *)indexPath animated:(BOOL)animated scrollPosition:(TUITableViewScrollPosition)scrollPosition;
|
||||
- (void)deselectRowAtIndexPath:(TUIFastIndexPath *)indexPath animated:(BOOL)animated;
|
||||
|
||||
/**
|
||||
Above the top cell, only visible if you pull down (if you have scroll bouncing enabled)
|
||||
*/
|
||||
@property (nonatomic, strong) TUIView *pullDownView;
|
||||
|
||||
- (BOOL)pullDownViewIsVisible;
|
||||
|
||||
@property (nonatomic, strong) TUIView *headerView;
|
||||
|
||||
/**
|
||||
Used by the delegate to acquire an already allocated cell, in lieu of allocating a new one.
|
||||
*/
|
||||
- (TUITableViewCell *)dequeueReusableCellWithIdentifier:(NSString *)identifier;
|
||||
|
||||
@end
|
||||
|
||||
@protocol TUITableViewDataSource<NSObject>
|
||||
|
||||
@required
|
||||
|
||||
- (NSInteger)tableView:(TUITableView *)table numberOfRowsInSection:(NSInteger)section;
|
||||
|
||||
- (TUITableViewCell *)tableView:(TUITableView *)tableView cellForRowAtIndexPath:(TUIFastIndexPath *)indexPath;
|
||||
|
||||
@optional
|
||||
|
||||
- (TUIView *)tableView:(TUITableView *)tableView headerViewForSection:(NSInteger)section;
|
||||
|
||||
// the following are required to support row reordering
|
||||
- (BOOL)tableView:(TUITableView *)tableView canMoveRowAtIndexPath:(TUIFastIndexPath *)indexPath;
|
||||
- (void)tableView:(TUITableView *)tableView moveRowAtIndexPath:(TUIFastIndexPath *)fromIndexPath toIndexPath:(TUIFastIndexPath *)toIndexPath;
|
||||
|
||||
// the following are required to support row reordering
|
||||
- (BOOL)tableView:(TUITableView *)tableView canMoveRowAtIndexPath:(TUIFastIndexPath *)indexPath;
|
||||
- (void)tableView:(TUITableView *)tableView moveRowAtIndexPath:(TUIFastIndexPath *)fromIndexPath toIndexPath:(TUIFastIndexPath *)toIndexPath;
|
||||
|
||||
/**
|
||||
Default is 1 if not implemented
|
||||
*/
|
||||
- (NSInteger)numberOfSectionsInTableView:(TUITableView *)tableView;
|
||||
|
||||
@end
|
||||
|
||||
@interface NSIndexPath (TUITableView)
|
||||
|
||||
+ (NSIndexPath *)indexPathForRow:(NSUInteger)row inSection:(NSUInteger)section;
|
||||
|
||||
@property(nonatomic,readonly) NSUInteger section;
|
||||
@property(nonatomic,readonly) NSUInteger row;
|
||||
|
||||
@end
|
||||
|
||||
#import "TUITableViewCell.h"
|
||||
#import "TUITableView+Derepeater.h"
|
||||
1248
test/fixtures/objective-c/TUITableView.m
vendored
Normal file
1248
test/fixtures/objective-c/TUITableView.m
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1684
test/fixtures/perl/Ack.pm
vendored
Normal file
1684
test/fixtures/perl/Ack.pm
vendored
Normal file
File diff suppressed because it is too large
Load Diff
695
test/fixtures/perl/Request.pm
vendored
Normal file
695
test/fixtures/perl/Request.pm
vendored
Normal file
@@ -0,0 +1,695 @@
|
||||
package Plack::Request;
|
||||
use strict;
|
||||
use warnings;
|
||||
use 5.008_001;
|
||||
our $VERSION = '0.9988';
|
||||
$VERSION = eval $VERSION;
|
||||
|
||||
use HTTP::Headers;
|
||||
use Carp ();
|
||||
use Hash::MultiValue;
|
||||
use HTTP::Body;
|
||||
|
||||
use Plack::Request::Upload;
|
||||
use Plack::TempBuffer;
|
||||
use URI;
|
||||
use URI::Escape ();
|
||||
|
||||
sub _deprecated {
|
||||
my $alt = shift;
|
||||
my $method = (caller(1))[3];
|
||||
Carp::carp("$method is deprecated. Use '$alt' instead.");
|
||||
}
|
||||
|
||||
sub new {
|
||||
my($class, $env) = @_;
|
||||
Carp::croak(q{$env is required})
|
||||
unless defined $env && ref($env) eq 'HASH';
|
||||
|
||||
bless { env => $env }, $class;
|
||||
}
|
||||
|
||||
sub env { $_[0]->{env} }
|
||||
|
||||
sub address { $_[0]->env->{REMOTE_ADDR} }
|
||||
sub remote_host { $_[0]->env->{REMOTE_HOST} }
|
||||
sub protocol { $_[0]->env->{SERVER_PROTOCOL} }
|
||||
sub method { $_[0]->env->{REQUEST_METHOD} }
|
||||
sub port { $_[0]->env->{SERVER_PORT} }
|
||||
sub user { $_[0]->env->{REMOTE_USER} }
|
||||
sub request_uri { $_[0]->env->{REQUEST_URI} }
|
||||
sub path_info { $_[0]->env->{PATH_INFO} }
|
||||
sub path { $_[0]->env->{PATH_INFO} || '/' }
|
||||
sub script_name { $_[0]->env->{SCRIPT_NAME} }
|
||||
sub scheme { $_[0]->env->{'psgi.url_scheme'} }
|
||||
sub secure { $_[0]->scheme eq 'https' }
|
||||
sub body { $_[0]->env->{'psgi.input'} }
|
||||
sub input { $_[0]->env->{'psgi.input'} }
|
||||
|
||||
sub content_length { $_[0]->env->{CONTENT_LENGTH} }
|
||||
sub content_type { $_[0]->env->{CONTENT_TYPE} }
|
||||
|
||||
sub session { $_[0]->env->{'psgix.session'} }
|
||||
sub session_options { $_[0]->env->{'psgix.session.options'} }
|
||||
sub logger { $_[0]->env->{'psgix.logger'} }
|
||||
|
||||
sub cookies {
|
||||
my $self = shift;
|
||||
|
||||
return {} unless $self->env->{HTTP_COOKIE};
|
||||
|
||||
# HTTP_COOKIE hasn't changed: reuse the parsed cookie
|
||||
if ( $self->env->{'plack.cookie.parsed'}
|
||||
&& $self->env->{'plack.cookie.string'} eq $self->env->{HTTP_COOKIE}) {
|
||||
return $self->env->{'plack.cookie.parsed'};
|
||||
}
|
||||
|
||||
$self->env->{'plack.cookie.string'} = $self->env->{HTTP_COOKIE};
|
||||
|
||||
my %results;
|
||||
my @pairs = grep /=/, split "[;,] ?", $self->env->{'plack.cookie.string'};
|
||||
for my $pair ( @pairs ) {
|
||||
# trim leading trailing whitespace
|
||||
$pair =~ s/^\s+//; $pair =~ s/\s+$//;
|
||||
|
||||
my ($key, $value) = map URI::Escape::uri_unescape($_), split( "=", $pair, 2 );
|
||||
|
||||
# Take the first one like CGI.pm or rack do
|
||||
$results{$key} = $value unless exists $results{$key};
|
||||
}
|
||||
|
||||
$self->env->{'plack.cookie.parsed'} = \%results;
|
||||
}
|
||||
|
||||
sub query_parameters {
|
||||
my $self = shift;
|
||||
$self->env->{'plack.request.query'} ||= Hash::MultiValue->new($self->uri->query_form);
|
||||
}
|
||||
|
||||
sub content {
|
||||
my $self = shift;
|
||||
|
||||
unless ($self->env->{'psgix.input.buffered'}) {
|
||||
$self->_parse_request_body;
|
||||
}
|
||||
|
||||
my $fh = $self->input or return '';
|
||||
my $cl = $self->env->{CONTENT_LENGTH} or return'';
|
||||
$fh->read(my($content), $cl, 0);
|
||||
$fh->seek(0, 0);
|
||||
|
||||
return $content;
|
||||
}
|
||||
|
||||
sub raw_body { $_[0]->content }
|
||||
|
||||
# XXX you can mutate headers with ->headers but it's not written through to the env
|
||||
|
||||
sub headers {
|
||||
my $self = shift;
|
||||
if (!defined $self->{headers}) {
|
||||
my $env = $self->env;
|
||||
$self->{headers} = HTTP::Headers->new(
|
||||
map {
|
||||
(my $field = $_) =~ s/^HTTPS?_//;
|
||||
( $field => $env->{$_} );
|
||||
}
|
||||
grep { /^(?:HTTP|CONTENT|COOKIE)/i } keys %$env
|
||||
);
|
||||
}
|
||||
$self->{headers};
|
||||
}
|
||||
|
||||
sub content_encoding { shift->headers->content_encoding(@_) }
|
||||
sub header { shift->headers->header(@_) }
|
||||
sub referer { shift->headers->referer(@_) }
|
||||
sub user_agent { shift->headers->user_agent(@_) }
|
||||
|
||||
sub body_parameters {
|
||||
my $self = shift;
|
||||
|
||||
unless ($self->env->{'plack.request.body'}) {
|
||||
$self->_parse_request_body;
|
||||
}
|
||||
|
||||
return $self->env->{'plack.request.body'};
|
||||
}
|
||||
|
||||
# contains body + query
|
||||
sub parameters {
|
||||
my $self = shift;
|
||||
|
||||
$self->env->{'plack.request.merged'} ||= do {
|
||||
my $query = $self->query_parameters;
|
||||
my $body = $self->body_parameters;
|
||||
Hash::MultiValue->new($query->flatten, $body->flatten);
|
||||
};
|
||||
}
|
||||
|
||||
sub uploads {
|
||||
my $self = shift;
|
||||
|
||||
if ($self->env->{'plack.request.upload'}) {
|
||||
return $self->env->{'plack.request.upload'};
|
||||
}
|
||||
|
||||
$self->_parse_request_body;
|
||||
return $self->env->{'plack.request.upload'};
|
||||
}
|
||||
|
||||
sub hostname { _deprecated 'remote_host'; $_[0]->remote_host || $_[0]->address }
|
||||
sub url_scheme { _deprecated 'scheme'; $_[0]->scheme }
|
||||
sub params { _deprecated 'parameters'; shift->parameters(@_) }
|
||||
sub query_params { _deprecated 'query_parameters'; shift->query_parameters(@_) }
|
||||
sub body_params { _deprecated 'body_parameters'; shift->body_parameters(@_) }
|
||||
|
||||
sub cookie {
|
||||
my $self = shift;
|
||||
_deprecated 'cookies';
|
||||
|
||||
return keys %{ $self->cookies } if @_ == 0;
|
||||
|
||||
my $name = shift;
|
||||
return $self->cookies->{$name};
|
||||
}
|
||||
|
||||
sub param {
|
||||
my $self = shift;
|
||||
|
||||
return keys %{ $self->parameters } if @_ == 0;
|
||||
|
||||
my $key = shift;
|
||||
return $self->parameters->{$key} unless wantarray;
|
||||
return $self->parameters->get_all($key);
|
||||
}
|
||||
|
||||
sub upload {
|
||||
my $self = shift;
|
||||
|
||||
return keys %{ $self->uploads } if @_ == 0;
|
||||
|
||||
my $key = shift;
|
||||
return $self->uploads->{$key} unless wantarray;
|
||||
return $self->uploads->get_all($key);
|
||||
}
|
||||
|
||||
sub raw_uri {
|
||||
my $self = shift;
|
||||
_deprecated 'base';
|
||||
|
||||
my $base = $self->base;
|
||||
$base->path_query($self->env->{REQUEST_URI});
|
||||
|
||||
$base;
|
||||
}
|
||||
|
||||
sub uri {
|
||||
my $self = shift;
|
||||
|
||||
my $base = $self->_uri_base;
|
||||
|
||||
# We have to escape back PATH_INFO in case they include stuff like
|
||||
# ? or # so that the URI parser won't be tricked. However we should
|
||||
# preserve '/' since encoding them into %2f doesn't make sense.
|
||||
# This means when a request like /foo%2fbar comes in, we recognize
|
||||
# it as /foo/bar which is not ideal, but that's how the PSGI PATH_INFO
|
||||
# spec goes and we can't do anything about it. See PSGI::FAQ for details.
|
||||
# http://github.com/miyagawa/Plack/issues#issue/118
|
||||
my $path_escape_class = '^A-Za-z0-9\-\._~/';
|
||||
|
||||
my $path = URI::Escape::uri_escape($self->env->{PATH_INFO} || '', $path_escape_class);
|
||||
$path .= '?' . $self->env->{QUERY_STRING}
|
||||
if defined $self->env->{QUERY_STRING} && $self->env->{QUERY_STRING} ne '';
|
||||
|
||||
$base =~ s!/$!! if $path =~ m!^/!;
|
||||
|
||||
return URI->new($base . $path)->canonical;
|
||||
}
|
||||
|
||||
sub base {
|
||||
my $self = shift;
|
||||
URI->new($self->_uri_base)->canonical;
|
||||
}
|
||||
|
||||
sub _uri_base {
|
||||
my $self = shift;
|
||||
|
||||
my $env = $self->env;
|
||||
|
||||
my $uri = ($env->{'psgi.url_scheme'} || "http") .
|
||||
"://" .
|
||||
($env->{HTTP_HOST} || (($env->{SERVER_NAME} || "") . ":" . ($env->{SERVER_PORT} || 80))) .
|
||||
($env->{SCRIPT_NAME} || '/');
|
||||
|
||||
return $uri;
|
||||
}
|
||||
|
||||
sub new_response {
|
||||
my $self = shift;
|
||||
require Plack::Response;
|
||||
Plack::Response->new(@_);
|
||||
}
|
||||
|
||||
sub _parse_request_body {
|
||||
my $self = shift;
|
||||
|
||||
my $ct = $self->env->{CONTENT_TYPE};
|
||||
my $cl = $self->env->{CONTENT_LENGTH};
|
||||
if (!$ct && !$cl) {
|
||||
# No Content-Type nor Content-Length -> GET/HEAD
|
||||
$self->env->{'plack.request.body'} = Hash::MultiValue->new;
|
||||
$self->env->{'plack.request.upload'} = Hash::MultiValue->new;
|
||||
return;
|
||||
}
|
||||
|
||||
my $body = HTTP::Body->new($ct, $cl);
|
||||
|
||||
# HTTP::Body will create temporary files in case there was an
|
||||
# upload. Those temporary files can be cleaned up by telling
|
||||
# HTTP::Body to do so. It will run the cleanup when the request
|
||||
# env is destroyed. That the object will not go out of scope by
|
||||
# the end of this sub we will store a reference here.
|
||||
$self->env->{'plack.request.http.body'} = $body;
|
||||
$body->cleanup(1);
|
||||
|
||||
my $input = $self->input;
|
||||
|
||||
my $buffer;
|
||||
if ($self->env->{'psgix.input.buffered'}) {
|
||||
# Just in case if input is read by middleware/apps beforehand
|
||||
$input->seek(0, 0);
|
||||
} else {
|
||||
$buffer = Plack::TempBuffer->new($cl);
|
||||
}
|
||||
|
||||
my $spin = 0;
|
||||
while ($cl) {
|
||||
$input->read(my $chunk, $cl < 8192 ? $cl : 8192);
|
||||
my $read = length $chunk;
|
||||
$cl -= $read;
|
||||
$body->add($chunk);
|
||||
$buffer->print($chunk) if $buffer;
|
||||
|
||||
if ($read == 0 && $spin++ > 2000) {
|
||||
Carp::croak "Bad Content-Length: maybe client disconnect? ($cl bytes remaining)";
|
||||
}
|
||||
}
|
||||
|
||||
if ($buffer) {
|
||||
$self->env->{'psgix.input.buffered'} = 1;
|
||||
$self->env->{'psgi.input'} = $buffer->rewind;
|
||||
} else {
|
||||
$input->seek(0, 0);
|
||||
}
|
||||
|
||||
$self->env->{'plack.request.body'} = Hash::MultiValue->from_mixed($body->param);
|
||||
|
||||
my @uploads = Hash::MultiValue->from_mixed($body->upload)->flatten;
|
||||
my @obj;
|
||||
while (my($k, $v) = splice @uploads, 0, 2) {
|
||||
push @obj, $k, $self->_make_upload($v);
|
||||
}
|
||||
|
||||
$self->env->{'plack.request.upload'} = Hash::MultiValue->new(@obj);
|
||||
|
||||
1;
|
||||
}
|
||||
|
||||
sub _make_upload {
|
||||
my($self, $upload) = @_;
|
||||
my %copy = %$upload;
|
||||
$copy{headers} = HTTP::Headers->new(%{$upload->{headers}});
|
||||
Plack::Request::Upload->new(%copy);
|
||||
}
|
||||
|
||||
1;
|
||||
__END__
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Plack::Request - Portable HTTP request object from PSGI env hash
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
use Plack::Request;
|
||||
|
||||
my $app_or_middleware = sub {
|
||||
my $env = shift; # PSGI env
|
||||
|
||||
my $req = Plack::Request->new($env);
|
||||
|
||||
my $path_info = $req->path_info;
|
||||
my $query = $req->param('query');
|
||||
|
||||
my $res = $req->new_response(200); # new Plack::Response
|
||||
$res->finalize;
|
||||
};
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
L<Plack::Request> provides a consistent API for request objects across
|
||||
web server environments.
|
||||
|
||||
=head1 CAVEAT
|
||||
|
||||
Note that this module is intended to be used by Plack middleware
|
||||
developers and web application framework developers rather than
|
||||
application developers (end users).
|
||||
|
||||
Writing your web application directly using Plack::Request is
|
||||
certainly possible but not recommended: it's like doing so with
|
||||
mod_perl's Apache::Request: yet too low level.
|
||||
|
||||
If you're writing a web application, not a framework, then you're
|
||||
encouraged to use one of the web application frameworks that support PSGI (L<http://plackperl.org/#frameworks>),
|
||||
or see modules like L<HTTP::Engine> to provide higher level
|
||||
Request and Response API on top of PSGI.
|
||||
|
||||
=head1 METHODS
|
||||
|
||||
Some of the methods defined in the earlier versions are deprecated in
|
||||
version 0.99. Take a look at L</"INCOMPATIBILITIES">.
|
||||
|
||||
Unless otherwise noted, all methods and attributes are B<read-only>,
|
||||
and passing values to the method like an accessor doesn't work like
|
||||
you expect it to.
|
||||
|
||||
=head2 new
|
||||
|
||||
Plack::Request->new( $env );
|
||||
|
||||
Creates a new request object.
|
||||
|
||||
=head1 ATTRIBUTES
|
||||
|
||||
=over 4
|
||||
|
||||
=item env
|
||||
|
||||
Returns the shared PSGI environment hash reference. This is a
|
||||
reference, so writing to this environment passes through during the
|
||||
whole PSGI request/response cycle.
|
||||
|
||||
=item address
|
||||
|
||||
Returns the IP address of the client (C<REMOTE_ADDR>).
|
||||
|
||||
=item remote_host
|
||||
|
||||
Returns the remote host (C<REMOTE_HOST>) of the client. It may be
|
||||
empty, in which case you have to get the IP address using C<address>
|
||||
method and resolve on your own.
|
||||
|
||||
=item method
|
||||
|
||||
Contains the request method (C<GET>, C<POST>, C<HEAD>, etc).
|
||||
|
||||
=item protocol
|
||||
|
||||
Returns the protocol (HTTP/1.0 or HTTP/1.1) used for the current request.
|
||||
|
||||
=item request_uri
|
||||
|
||||
Returns the raw, undecoded request URI path. You probably do B<NOT>
|
||||
want to use this to dispatch requests.
|
||||
|
||||
=item path_info
|
||||
|
||||
Returns B<PATH_INFO> in the environment. Use this to get the local
|
||||
path for the requests.
|
||||
|
||||
=item path
|
||||
|
||||
Similar to C<path_info> but returns C</> in case it is empty. In other
|
||||
words, it returns the virtual path of the request URI after C<<
|
||||
$req->base >>. See L</"DISPATCHING"> for details.
|
||||
|
||||
=item script_name
|
||||
|
||||
Returns B<SCRIPT_NAME> in the environment. This is the absolute path
|
||||
where your application is hosted.
|
||||
|
||||
=item scheme
|
||||
|
||||
Returns the scheme (C<http> or C<https>) of the request.
|
||||
|
||||
=item secure
|
||||
|
||||
Returns true or false, indicating whether the connection is secure (https).
|
||||
|
||||
=item body, input
|
||||
|
||||
Returns C<psgi.input> handle.
|
||||
|
||||
=item session
|
||||
|
||||
Returns (optional) C<psgix.session> hash. When it exists, you can
|
||||
retrieve and store per-session data from and to this hash.
|
||||
|
||||
=item session_options
|
||||
|
||||
Returns (optional) C<psgix.session.options> hash.
|
||||
|
||||
=item logger
|
||||
|
||||
Returns (optional) C<psgix.logger> code reference. When it exists,
|
||||
your application is supposed to send the log message to this logger,
|
||||
using:
|
||||
|
||||
$req->logger->({ level => 'debug', message => "This is a debug message" });
|
||||
|
||||
=item cookies
|
||||
|
||||
Returns a reference to a hash containing the cookies. Values are
|
||||
strings that are sent by clients and are URI decoded.
|
||||
|
||||
=item query_parameters
|
||||
|
||||
Returns a reference to a hash containing query string (GET)
|
||||
parameters. This hash reference is L<Hash::MultiValue> object.
|
||||
|
||||
=item body_parameters
|
||||
|
||||
Returns a reference to a hash containing posted parameters in the
|
||||
request body (POST). As with C<query_parameters>, the hash
|
||||
reference is a L<Hash::MultiValue> object.
|
||||
|
||||
=item parameters
|
||||
|
||||
Returns a L<Hash::MultiValue> hash reference containing (merged) GET
|
||||
and POST parameters.
|
||||
|
||||
=item content, raw_body
|
||||
|
||||
Returns the request content in an undecoded byte string for POST requests.
|
||||
|
||||
=item uri
|
||||
|
||||
Returns an URI object for the current request. The URI is constructed
|
||||
using various environment values such as C<SCRIPT_NAME>, C<PATH_INFO>,
|
||||
C<QUERY_STRING>, C<HTTP_HOST>, C<SERVER_NAME> and C<SERVER_PORT>.
|
||||
|
||||
Every time this method is called it returns a new, cloned URI object.
|
||||
|
||||
=item base
|
||||
|
||||
Returns an URI object for the base path of current request. This is
|
||||
like C<uri> but only contains up to C<SCRIPT_NAME> where your
|
||||
application is hosted at.
|
||||
|
||||
Every time this method is called it returns a new, cloned URI object.
|
||||
|
||||
=item user
|
||||
|
||||
Returns C<REMOTE_USER> if it's set.
|
||||
|
||||
=item headers
|
||||
|
||||
Returns an L<HTTP::Headers> object containing the headers for the current request.
|
||||
|
||||
=item uploads
|
||||
|
||||
Returns a reference to a hash containing uploads. The hash reference
|
||||
is a L<Hash::MultiValue> object and values are L<Plack::Request::Upload>
|
||||
objects.
|
||||
|
||||
=item content_encoding
|
||||
|
||||
Shortcut to $req->headers->content_encoding.
|
||||
|
||||
=item content_length
|
||||
|
||||
Shortcut to $req->headers->content_length.
|
||||
|
||||
=item content_type
|
||||
|
||||
Shortcut to $req->headers->content_type.
|
||||
|
||||
=item header
|
||||
|
||||
Shortcut to $req->headers->header.
|
||||
|
||||
=item referer
|
||||
|
||||
Shortcut to $req->headers->referer.
|
||||
|
||||
=item user_agent
|
||||
|
||||
Shortcut to $req->headers->user_agent.
|
||||
|
||||
=item param
|
||||
|
||||
Returns GET and POST parameters with a CGI.pm-compatible param
|
||||
method. This is an alternative method for accessing parameters in
|
||||
$req->parameters. Unlike CGI.pm, it does I<not> allow
|
||||
setting or modifying query parameters.
|
||||
|
||||
$value = $req->param( 'foo' );
|
||||
@values = $req->param( 'foo' );
|
||||
@params = $req->param;
|
||||
|
||||
=item upload
|
||||
|
||||
A convenient method to access $req->uploads.
|
||||
|
||||
$upload = $req->upload('field');
|
||||
@uploads = $req->upload('field');
|
||||
@fields = $req->upload;
|
||||
|
||||
for my $upload ( $req->upload('field') ) {
|
||||
print $upload->filename;
|
||||
}
|
||||
|
||||
=item new_response
|
||||
|
||||
my $res = $req->new_response;
|
||||
|
||||
Creates a new L<Plack::Response> object. Handy to remove dependency on
|
||||
L<Plack::Response> in your code for easy subclassing and duck typing
|
||||
in web application frameworks, as well as overriding Response
|
||||
generation in middlewares.
|
||||
|
||||
=back
|
||||
|
||||
=head2 Hash::MultiValue parameters
|
||||
|
||||
Parameters that can take one or multiple values (i.e. C<parameters>,
|
||||
C<query_parameters>, C<body_parameters> and C<uploads>) store the
|
||||
hash reference as a L<Hash::MultiValue> object. This means you can use
|
||||
the hash reference as a plain hash where values are B<always> scalars
|
||||
(B<NOT> array references), so you don't need to code ugly and unsafe
|
||||
C<< ref ... eq 'ARRAY' >> anymore.
|
||||
|
||||
And if you explicitly want to get multiple values of the same key, you
|
||||
can call the C<get_all> method on it, such as:
|
||||
|
||||
my @foo = $req->query_parameters->get_all('foo');
|
||||
|
||||
You can also call C<get_one> to always get one parameter independent
|
||||
of the context (unlike C<param>), and even call C<mixed> (with
|
||||
Hash::MultiValue 0.05 or later) to get the I<traditional> hash
|
||||
reference,
|
||||
|
||||
my $params = $req->parameters->mixed;
|
||||
|
||||
where values are either a scalar or an array reference depending on
|
||||
input, so it might be useful if you already have the code to deal with
|
||||
that ugliness.
|
||||
|
||||
=head2 PARSING POST BODY and MULTIPLE OBJECTS
|
||||
|
||||
The methods to parse request body (C<content>, C<body_parameters> and
|
||||
C<uploads>) are carefully coded to save the parsed body in the
|
||||
environment hash as well as in the temporary buffer, so you can call
|
||||
them multiple times and create Plack::Request objects multiple times
|
||||
in a request and they should work safely, and won't parse request body
|
||||
more than twice for the efficiency.
|
||||
|
||||
=head1 DISPATCHING
|
||||
|
||||
If your application or framework wants to dispatch (or route) actions
|
||||
based on request paths, be sure to use C<< $req->path_info >> not C<<
|
||||
$req->uri->path >>.
|
||||
|
||||
This is because C<path_info> gives you the virtual path of the request,
|
||||
regardless of how your application is mounted. If your application is
|
||||
hosted with mod_perl or CGI scripts, or even multiplexed with tools
|
||||
like L<Plack::App::URLMap>, request's C<path_info> always gives you
|
||||
the action path.
|
||||
|
||||
Note that C<path_info> might give you an empty string, in which case
|
||||
you should assume that the path is C</>.
|
||||
|
||||
You will also want to use C<< $req->base >> as a base prefix when
|
||||
building URLs in your templates or in redirections. It's a good idea
|
||||
for you to subclass Plack::Request and define methods such as:
|
||||
|
||||
sub uri_for {
|
||||
my($self, $path, $args) = @_;
|
||||
my $uri = $self->base;
|
||||
$uri->path($uri->path . $path);
|
||||
$uri->query_form(@$args) if $args;
|
||||
$uri;
|
||||
}
|
||||
|
||||
So you can say:
|
||||
|
||||
my $link = $req->uri_for('/logout', [ signoff => 1 ]);
|
||||
|
||||
and if C<< $req->base >> is C</app> you'll get the full URI for
|
||||
C</app/logout?signoff=1>.
|
||||
|
||||
=head1 INCOMPATIBILITIES
|
||||
|
||||
In version 0.99, many utility methods are removed or deprecated, and
|
||||
most methods are made read-only.
|
||||
|
||||
The following methods are deprecated: C<hostname>, C<url_scheme>,
|
||||
C<params>, C<query_params>, C<body_params>, C<cookie> and
|
||||
C<raw_uri>. They will be removed in the next major release.
|
||||
|
||||
All parameter-related methods such as C<parameters>,
|
||||
C<body_parameters>, C<query_parameters> and C<uploads> now contains
|
||||
L<Hash::MultiValue> objects, rather than I<scalar or an array
|
||||
reference depending on the user input> which is insecure. See
|
||||
L<Hash::MultiValue> for more about this change.
|
||||
|
||||
C<< $req->path >> method had a bug, where the code and the document
|
||||
was mismatching. The document was suggesting it returns the sub
|
||||
request path after C<< $req->base >> but the code was always returning
|
||||
the absolute URI path. The code is now updated to be an alias of C<<
|
||||
$req->path_info >> but returns C</> in case it's empty. If you need
|
||||
the older behavior, just call C<< $req->uri->path >> instead.
|
||||
|
||||
Cookie handling is simplified, and doesn't use L<CGI::Simple::Cookie>
|
||||
anymore, which means you B<CAN NOT> set array reference or hash
|
||||
reference as a cookie value and expect it be serialized. You're always
|
||||
required to set string value, and encoding or decoding them is totally
|
||||
up to your application or framework. Also, C<cookies> hash reference
|
||||
now returns I<strings> for the cookies rather than CGI::Simple::Cookie
|
||||
objects, which means you no longer have to write a wacky code such as:
|
||||
|
||||
$v = $req->cookie->{foo} ? $req->cookie->{foo}->value : undef;
|
||||
|
||||
and instead, simply do:
|
||||
|
||||
$v = $req->cookie->{foo};
|
||||
|
||||
=head1 AUTHORS
|
||||
|
||||
Tatsuhiko Miyagawa
|
||||
|
||||
Kazuhiro Osawa
|
||||
|
||||
Tokuhiro Matsuno
|
||||
|
||||
=head1 SEE ALSO
|
||||
|
||||
L<Plack::Response> L<HTTP::Request>, L<Catalyst::Request>
|
||||
|
||||
=head1 LICENSE
|
||||
|
||||
This library is free software; you can redistribute it and/or modify
|
||||
it under the same terms as Perl itself.
|
||||
|
||||
=cut
|
||||
309
test/fixtures/perl/Response.pm
vendored
Normal file
309
test/fixtures/perl/Response.pm
vendored
Normal file
@@ -0,0 +1,309 @@
|
||||
package Plack::Response;
|
||||
use strict;
|
||||
use warnings;
|
||||
our $VERSION = '0.9988';
|
||||
$VERSION = eval $VERSION;
|
||||
|
||||
use Plack::Util::Accessor qw(body status);
|
||||
use Carp ();
|
||||
use Scalar::Util ();
|
||||
use HTTP::Headers;
|
||||
use URI::Escape ();
|
||||
|
||||
sub code { shift->status(@_) }
|
||||
sub content { shift->body(@_) }
|
||||
|
||||
sub new {
|
||||
my($class, $rc, $headers, $content) = @_;
|
||||
|
||||
my $self = bless {}, $class;
|
||||
$self->status($rc) if defined $rc;
|
||||
$self->headers($headers) if defined $headers;
|
||||
$self->body($content) if defined $content;
|
||||
|
||||
$self;
|
||||
}
|
||||
|
||||
sub headers {
|
||||
my $self = shift;
|
||||
|
||||
if (@_) {
|
||||
my $headers = shift;
|
||||
if (ref $headers eq 'ARRAY') {
|
||||
Carp::carp("Odd number of headers") if @$headers % 2 != 0;
|
||||
$headers = HTTP::Headers->new(@$headers);
|
||||
} elsif (ref $headers eq 'HASH') {
|
||||
$headers = HTTP::Headers->new(%$headers);
|
||||
}
|
||||
return $self->{headers} = $headers;
|
||||
} else {
|
||||
return $self->{headers} ||= HTTP::Headers->new();
|
||||
}
|
||||
}
|
||||
|
||||
sub cookies {
|
||||
my $self = shift;
|
||||
if (@_) {
|
||||
$self->{cookies} = shift;
|
||||
} else {
|
||||
return $self->{cookies} ||= +{ };
|
||||
}
|
||||
}
|
||||
|
||||
sub header { shift->headers->header(@_) } # shortcut
|
||||
|
||||
sub content_length {
|
||||
shift->headers->content_length(@_);
|
||||
}
|
||||
|
||||
sub content_type {
|
||||
shift->headers->content_type(@_);
|
||||
}
|
||||
|
||||
sub content_encoding {
|
||||
shift->headers->content_encoding(@_);
|
||||
}
|
||||
|
||||
sub location {
|
||||
my $self = shift;
|
||||
return $self->headers->header('Location' => @_);
|
||||
}
|
||||
|
||||
sub redirect {
|
||||
my $self = shift;
|
||||
|
||||
if (@_) {
|
||||
my $url = shift;
|
||||
my $status = shift || 302;
|
||||
$self->location($url);
|
||||
$self->status($status);
|
||||
}
|
||||
|
||||
return $self->location;
|
||||
}
|
||||
|
||||
sub finalize {
|
||||
my $self = shift;
|
||||
Carp::croak "missing status" unless $self->status();
|
||||
|
||||
my $headers = $self->headers->clone;
|
||||
$self->_finalize_cookies($headers);
|
||||
|
||||
return [
|
||||
$self->status,
|
||||
+[
|
||||
map {
|
||||
my $k = $_;
|
||||
map {
|
||||
my $v = $_;
|
||||
$v =~ s/\015\012[\040|\011]+/chr(32)/ge; # replace LWS with a single SP
|
||||
$v =~ s/\015|\012//g; # remove CR and LF since the char is invalid here
|
||||
|
||||
( $k => $v )
|
||||
} $headers->header($_);
|
||||
|
||||
} $headers->header_field_names
|
||||
],
|
||||
$self->_body,
|
||||
];
|
||||
}
|
||||
|
||||
sub _body {
|
||||
my $self = shift;
|
||||
my $body = $self->body;
|
||||
$body = [] unless defined $body;
|
||||
if (!ref $body or Scalar::Util::blessed($body) && overload::Method($body, q("")) && !$body->can('getline')) {
|
||||
return [ $body ];
|
||||
} else {
|
||||
return $body;
|
||||
}
|
||||
}
|
||||
|
||||
sub _finalize_cookies {
|
||||
my($self, $headers) = @_;
|
||||
|
||||
while (my($name, $val) = each %{$self->cookies}) {
|
||||
my $cookie = $self->_bake_cookie($name, $val);
|
||||
$headers->push_header('Set-Cookie' => $cookie);
|
||||
}
|
||||
}
|
||||
|
||||
sub _bake_cookie {
|
||||
my($self, $name, $val) = @_;
|
||||
|
||||
return '' unless defined $val;
|
||||
$val = { value => $val } unless ref $val eq 'HASH';
|
||||
|
||||
my @cookie = ( URI::Escape::uri_escape($name) . "=" . URI::Escape::uri_escape($val->{value}) );
|
||||
push @cookie, "domain=" . $val->{domain} if $val->{domain};
|
||||
push @cookie, "path=" . $val->{path} if $val->{path};
|
||||
push @cookie, "expires=" . $self->_date($val->{expires}) if $val->{expires};
|
||||
push @cookie, "secure" if $val->{secure};
|
||||
push @cookie, "HttpOnly" if $val->{httponly};
|
||||
|
||||
return join "; ", @cookie;
|
||||
}
|
||||
|
||||
my @MON = qw( Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec );
|
||||
my @WDAY = qw( Sun Mon Tue Wed Thu Fri Sat );
|
||||
|
||||
sub _date {
|
||||
my($self, $expires) = @_;
|
||||
|
||||
if ($expires =~ /^\d+$/) {
|
||||
# all numbers -> epoch date
|
||||
# (cookies use '-' as date separator, HTTP uses ' ')
|
||||
my($sec, $min, $hour, $mday, $mon, $year, $wday) = gmtime($expires);
|
||||
$year += 1900;
|
||||
|
||||
return sprintf("%s, %02d-%s-%04d %02d:%02d:%02d GMT",
|
||||
$WDAY[$wday], $mday, $MON[$mon], $year, $hour, $min, $sec);
|
||||
|
||||
}
|
||||
|
||||
return $expires;
|
||||
}
|
||||
|
||||
1;
|
||||
__END__
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Plack::Response - Portable HTTP Response object for PSGI response
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
use Plack::Response;
|
||||
|
||||
sub psgi_handler {
|
||||
my $env = shift;
|
||||
|
||||
my $res = Plack::Response->new(200);
|
||||
$res->content_type('text/html');
|
||||
$res->body("Hello World");
|
||||
|
||||
return $res->finalize;
|
||||
}
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
Plack::Response allows you a way to create PSGI response array ref through a simple API.
|
||||
|
||||
=head1 METHODS
|
||||
|
||||
=over 4
|
||||
|
||||
=item new
|
||||
|
||||
$res = Plack::Response->new;
|
||||
$res = Plack::Response->new($status);
|
||||
$res = Plack::Response->new($status, $headers);
|
||||
$res = Plack::Response->new($status, $headers, $body);
|
||||
|
||||
Creates a new Plack::Response object.
|
||||
|
||||
=item status
|
||||
|
||||
$res->status(200);
|
||||
$status = $res->status;
|
||||
|
||||
Sets and gets HTTP status code. C<code> is an alias.
|
||||
|
||||
=item headers
|
||||
|
||||
$headers = $res->headers;
|
||||
$res->headers([ 'Content-Type' => 'text/html' ]);
|
||||
$res->headers({ 'Content-Type' => 'text/html' });
|
||||
$res->headers( HTTP::Headers->new );
|
||||
|
||||
Sets and gets HTTP headers of the response. Setter can take either an
|
||||
array ref, a hash ref or L<HTTP::Headers> object containing a list of
|
||||
headers.
|
||||
|
||||
=item body
|
||||
|
||||
$res->body($body_str);
|
||||
$res->body([ "Hello", "World" ]);
|
||||
$res->body($io);
|
||||
|
||||
Gets and sets HTTP response body. Setter can take either a string, an
|
||||
array ref, or an IO::Handle-like object. C<content> is an alias.
|
||||
|
||||
Note that this method doesn't automatically set I<Content-Length> for
|
||||
the response. You have to set it manually if you want, with the
|
||||
C<content_length> method (see below).
|
||||
|
||||
=item header
|
||||
|
||||
$res->header('X-Foo' => 'bar');
|
||||
my $val = $res->header('X-Foo');
|
||||
|
||||
Shortcut for C<< $res->headers->header >>.
|
||||
|
||||
=item content_type, content_length, content_encoding
|
||||
|
||||
$res->content_type('text/plain');
|
||||
$res->content_length(123);
|
||||
$res->content_encoding('gzip');
|
||||
|
||||
Shortcut for the equivalent get/set methods in C<< $res->headers >>.
|
||||
|
||||
=item redirect
|
||||
|
||||
$res->redirect($url);
|
||||
$res->redirect($url, 301);
|
||||
|
||||
Sets redirect URL with an optional status code, which defaults to 302.
|
||||
|
||||
Note that this method doesn't normalize the given URI string. Users of
|
||||
this module have to be responsible about properly encoding URI paths
|
||||
and parameters.
|
||||
|
||||
=item location
|
||||
|
||||
Gets and sets C<Location> header.
|
||||
|
||||
Note that this method doesn't normalize the given URI string in the
|
||||
setter. See above in C<redirect> for details.
|
||||
|
||||
=item cookies
|
||||
|
||||
$res->cookies->{foo} = 123;
|
||||
$res->cookies->{foo} = { value => '123' };
|
||||
|
||||
Returns a hash reference containing cookies to be set in the
|
||||
response. The keys of the hash are the cookies' names, and their
|
||||
corresponding values are a plain string (for C<value> with everything
|
||||
else defaults) or a hash reference that can contain keys such as
|
||||
C<value>, C<domain>, C<expires>, C<path>, C<httponly>, C<secure>.
|
||||
|
||||
C<expires> can take a string or an integer (as an epoch time) and
|
||||
B<does not> convert string formats such as C<+3M>.
|
||||
|
||||
$res->cookies->{foo} = {
|
||||
value => 'test',
|
||||
path => "/",
|
||||
domain => '.example.com',
|
||||
expires => time + 24 * 60 * 60,
|
||||
};
|
||||
|
||||
=item finalize
|
||||
|
||||
$res->finalize;
|
||||
|
||||
Returns the status code, headers, and body of this response as a PSGI
|
||||
response array reference.
|
||||
|
||||
=back
|
||||
|
||||
=head1 AUTHOR
|
||||
|
||||
Tokuhiro Matsuno
|
||||
|
||||
Tatsuhiko Miyagawa
|
||||
|
||||
=head1 SEE ALSO
|
||||
|
||||
L<Plack::Request>
|
||||
|
||||
=cut
|
||||
2844
test/fixtures/perl/ack
vendored
Executable file
2844
test/fixtures/perl/ack
vendored
Executable file
File diff suppressed because it is too large
Load Diff
42
test/fixtures/perl/fib.pl
vendored
Normal file
42
test/fixtures/perl/fib.pl
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
#! perl
|
||||
# Copyright (C) 2001-2003, Parrot Foundation.
|
||||
|
||||
=head1 NAME
|
||||
|
||||
examples/benchmarks/fib.pl - Fibonacci Benchmark
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
% time perl examples/benchmarks/fib.pl n
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
Calculates the Fibonacci Number for C<n> (defaults to 28 if
|
||||
unspecified).
|
||||
|
||||
=cut
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
sub fib {
|
||||
my $n = shift;
|
||||
return $n if ( $n < 2 );
|
||||
return fib( $n - 1 ) + fib( $n - 2 );
|
||||
}
|
||||
my $N = shift || 28;
|
||||
|
||||
print "fib($N) = ", fib($N), "\n";
|
||||
|
||||
=head1 SEE ALSO
|
||||
|
||||
F<examples/benchmarks/fib.pir>.
|
||||
|
||||
=cut
|
||||
|
||||
# Local Variables:
|
||||
# mode: cperl
|
||||
# cperl-indent-level: 4
|
||||
# fill-column: 100
|
||||
# End:
|
||||
# vim: expandtab shiftwidth=4:
|
||||
27
test/fixtures/perl/oo1.pl
vendored
Normal file
27
test/fixtures/perl/oo1.pl
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
#! perl
|
||||
|
||||
# Copyright (C) 2004-2006, Parrot Foundation.
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
for my $i ( 1 .. 100000 ) {
|
||||
my $o = new Foo();
|
||||
}
|
||||
my $o = new Foo();
|
||||
print $o->[0], "\n";
|
||||
|
||||
package Foo;
|
||||
|
||||
sub new {
|
||||
my $self = ref $_[0] ? ref shift : shift;
|
||||
return bless [ 10, 20 ], $self;
|
||||
}
|
||||
1;
|
||||
|
||||
# Local Variables:
|
||||
# mode: cperl
|
||||
# cperl-indent-level: 4
|
||||
# fill-column: 100
|
||||
# End:
|
||||
# vim: expandtab shiftwidth=4:
|
||||
27
test/fixtures/perl/oo2.pl
vendored
Normal file
27
test/fixtures/perl/oo2.pl
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
#! perl
|
||||
|
||||
# Copyright (C) 2004-2006, Parrot Foundation.
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
for my $i ( 1 .. 500000 ) {
|
||||
my $o = new Foo();
|
||||
}
|
||||
my $o = new Foo();
|
||||
print $o->[0], "\n";
|
||||
|
||||
package Foo;
|
||||
|
||||
sub new {
|
||||
my $self = ref $_[0] ? ref shift : shift;
|
||||
return bless [ 10, 20 ], $self;
|
||||
}
|
||||
1;
|
||||
|
||||
# Local Variables:
|
||||
# mode: cperl
|
||||
# cperl-indent-level: 4
|
||||
# fill-column: 100
|
||||
# End:
|
||||
# vim: expandtab shiftwidth=4:
|
||||
29
test/fixtures/perl/oo3.pl
vendored
Normal file
29
test/fixtures/perl/oo3.pl
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
#! perl
|
||||
|
||||
# Copyright (C) 2004-2006, Parrot Foundation.
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
my $o = new Foo();
|
||||
for my $i ( 1 .. 500000 ) {
|
||||
my $x = $o->[0];
|
||||
my $y = $o->[1];
|
||||
}
|
||||
print $o->[0], "\n";
|
||||
|
||||
package Foo;
|
||||
|
||||
sub new {
|
||||
my $self = ref $_[0] ? ref shift : shift;
|
||||
return bless [ 10, 20 ], $self;
|
||||
}
|
||||
|
||||
1;
|
||||
|
||||
# Local Variables:
|
||||
# mode: cperl
|
||||
# cperl-indent-level: 4
|
||||
# fill-column: 100
|
||||
# End:
|
||||
# vim: expandtab shiftwidth=4:
|
||||
1066
test/fixtures/php/Application.php
vendored
Normal file
1066
test/fixtures/php/Application.php
vendored
Normal file
File diff suppressed because it is too large
Load Diff
492
test/fixtures/php/Client.php
vendored
Normal file
492
test/fixtures/php/Client.php
vendored
Normal file
@@ -0,0 +1,492 @@
|
||||
<?php
|
||||
|
||||
/*
|
||||
* This file is part of the Symfony package.
|
||||
*
|
||||
* (c) Fabien Potencier <fabien@symfony.com>
|
||||
*
|
||||
* For the full copyright and license information, please view the LICENSE
|
||||
* file that was distributed with this source code.
|
||||
*/
|
||||
|
||||
namespace Symfony\Component\BrowserKit;
|
||||
|
||||
use Symfony\Component\DomCrawler\Crawler;
|
||||
use Symfony\Component\DomCrawler\Link;
|
||||
use Symfony\Component\DomCrawler\Form;
|
||||
use Symfony\Component\Process\PhpProcess;
|
||||
|
||||
/**
|
||||
* Client simulates a browser.
|
||||
*
|
||||
* To make the actual request, you need to implement the doRequest() method.
|
||||
*
|
||||
* If you want to be able to run requests in their own process (insulated flag),
|
||||
* you need to also implement the getScript() method.
|
||||
*
|
||||
* @author Fabien Potencier <fabien@symfony.com>
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
abstract class Client
|
||||
{
|
||||
protected $history;
|
||||
protected $cookieJar;
|
||||
protected $server;
|
||||
protected $request;
|
||||
protected $response;
|
||||
protected $crawler;
|
||||
protected $insulated;
|
||||
protected $redirect;
|
||||
protected $followRedirects;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param array $server The server parameters (equivalent of $_SERVER)
|
||||
* @param History $history A History instance to store the browser history
|
||||
* @param CookieJar $cookieJar A CookieJar instance to store the cookies
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function __construct(array $server = array(), History $history = null, CookieJar $cookieJar = null)
|
||||
{
|
||||
$this->setServerParameters($server);
|
||||
$this->history = null === $history ? new History() : $history;
|
||||
$this->cookieJar = null === $cookieJar ? new CookieJar() : $cookieJar;
|
||||
$this->insulated = false;
|
||||
$this->followRedirects = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether to automatically follow redirects or not.
|
||||
*
|
||||
* @param Boolean $followRedirect Whether to follow redirects
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function followRedirects($followRedirect = true)
|
||||
{
|
||||
$this->followRedirects = (Boolean) $followRedirect;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the insulated flag.
|
||||
*
|
||||
* @param Boolean $insulated Whether to insulate the requests or not
|
||||
*
|
||||
* @throws \RuntimeException When Symfony Process Component is not installed
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function insulate($insulated = true)
|
||||
{
|
||||
if ($insulated && !class_exists('Symfony\\Component\\Process\\Process')) {
|
||||
// @codeCoverageIgnoreStart
|
||||
throw new \RuntimeException('Unable to isolate requests as the Symfony Process Component is not installed.');
|
||||
// @codeCoverageIgnoreEnd
|
||||
}
|
||||
|
||||
$this->insulated = (Boolean) $insulated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets server parameters.
|
||||
*
|
||||
* @param array $server An array of server parameters
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function setServerParameters(array $server)
|
||||
{
|
||||
$this->server = array_merge(array(
|
||||
'HTTP_HOST' => 'localhost',
|
||||
'HTTP_USER_AGENT' => 'Symfony2 BrowserKit',
|
||||
), $server);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets single server parameter.
|
||||
*
|
||||
* @param string $key A key of the parameter
|
||||
* @param string $value A value of the parameter
|
||||
*/
|
||||
public function setServerParameter($key, $value)
|
||||
{
|
||||
$this->server[$key] = $value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets single server parameter for specified key.
|
||||
*
|
||||
* @param string $key A key of the parameter to get
|
||||
* @param string $default A default value when key is undefined
|
||||
*
|
||||
* @return string A value of the parameter
|
||||
*/
|
||||
public function getServerParameter($key, $default = '')
|
||||
{
|
||||
return (isset($this->server[$key])) ? $this->server[$key] : $default;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the History instance.
|
||||
*
|
||||
* @return History A History instance
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getHistory()
|
||||
{
|
||||
return $this->history;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CookieJar instance.
|
||||
*
|
||||
* @return CookieJar A CookieJar instance
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getCookieJar()
|
||||
{
|
||||
return $this->cookieJar;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current Crawler instance.
|
||||
*
|
||||
* @return Crawler A Crawler instance
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getCrawler()
|
||||
{
|
||||
return $this->crawler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current Response instance.
|
||||
*
|
||||
* @return Response A Response instance
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getResponse()
|
||||
{
|
||||
return $this->response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current Request instance.
|
||||
*
|
||||
* @return Request A Request instance
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getRequest()
|
||||
{
|
||||
return $this->request;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks on a given link.
|
||||
*
|
||||
* @param Link $link A Link instance
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function click(Link $link)
|
||||
{
|
||||
if ($link instanceof Form) {
|
||||
return $this->submit($link);
|
||||
}
|
||||
|
||||
return $this->request($link->getMethod(), $link->getUri());
|
||||
}
|
||||
|
||||
/**
|
||||
* Submits a form.
|
||||
*
|
||||
* @param Form $form A Form instance
|
||||
* @param array $values An array of form field values
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function submit(Form $form, array $values = array())
|
||||
{
|
||||
$form->setValues($values);
|
||||
|
||||
return $this->request($form->getMethod(), $form->getUri(), $form->getPhpValues(), $form->getPhpFiles());
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls a URI.
|
||||
*
|
||||
* @param string $method The request method
|
||||
* @param string $uri The URI to fetch
|
||||
* @param array $parameters The Request parameters
|
||||
* @param array $files The files
|
||||
* @param array $server The server parameters (HTTP headers are referenced with a HTTP_ prefix as PHP does)
|
||||
* @param string $content The raw body data
|
||||
* @param Boolean $changeHistory Whether to update the history or not (only used internally for back(), forward(), and reload())
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function request($method, $uri, array $parameters = array(), array $files = array(), array $server = array(), $content = null, $changeHistory = true)
|
||||
{
|
||||
$uri = $this->getAbsoluteUri($uri);
|
||||
|
||||
$server = array_merge($this->server, $server);
|
||||
if (!$this->history->isEmpty()) {
|
||||
$server['HTTP_REFERER'] = $this->history->current()->getUri();
|
||||
}
|
||||
$server['HTTP_HOST'] = parse_url($uri, PHP_URL_HOST);
|
||||
$server['HTTPS'] = 'https' == parse_url($uri, PHP_URL_SCHEME);
|
||||
|
||||
$request = new Request($uri, $method, $parameters, $files, $this->cookieJar->allValues($uri), $server, $content);
|
||||
|
||||
$this->request = $this->filterRequest($request);
|
||||
|
||||
if (true === $changeHistory) {
|
||||
$this->history->add($request);
|
||||
}
|
||||
|
||||
if ($this->insulated) {
|
||||
$this->response = $this->doRequestInProcess($this->request);
|
||||
} else {
|
||||
$this->response = $this->doRequest($this->request);
|
||||
}
|
||||
|
||||
$response = $this->filterResponse($this->response);
|
||||
|
||||
$this->cookieJar->updateFromResponse($response);
|
||||
|
||||
$this->redirect = $response->getHeader('Location');
|
||||
|
||||
if ($this->followRedirects && $this->redirect) {
|
||||
return $this->crawler = $this->followRedirect();
|
||||
}
|
||||
|
||||
return $this->crawler = $this->createCrawlerFromContent($request->getUri(), $response->getContent(), $response->getHeader('Content-Type'));
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a request in another process.
|
||||
*
|
||||
* @param Request $request A Request instance
|
||||
*
|
||||
* @return Response A Response instance
|
||||
*
|
||||
* @throws \RuntimeException When processing returns exit code
|
||||
*/
|
||||
protected function doRequestInProcess($request)
|
||||
{
|
||||
// We set the TMPDIR (for Macs) and TEMP (for Windows), because on these platforms the temp directory changes based on the user.
|
||||
$process = new PhpProcess($this->getScript($request), null, array('TMPDIR' => sys_get_temp_dir(), 'TEMP' => sys_get_temp_dir()));
|
||||
$process->run();
|
||||
|
||||
if (!$process->isSuccessful() || !preg_match('/^O\:\d+\:/', $process->getOutput())) {
|
||||
throw new \RuntimeException('OUTPUT: '.$process->getOutput().' ERROR OUTPUT: '.$process->getErrorOutput());
|
||||
}
|
||||
|
||||
return unserialize($process->getOutput());
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a request.
|
||||
*
|
||||
* @param Request $request A Request instance
|
||||
*
|
||||
* @return Response A Response instance
|
||||
*/
|
||||
abstract protected function doRequest($request);
|
||||
|
||||
/**
|
||||
* Returns the script to execute when the request must be insulated.
|
||||
*
|
||||
* @param Request $request A Request instance
|
||||
*
|
||||
* @throws \LogicException When this abstract class is not implemented
|
||||
*/
|
||||
protected function getScript($request)
|
||||
{
|
||||
// @codeCoverageIgnoreStart
|
||||
throw new \LogicException('To insulate requests, you need to override the getScript() method.');
|
||||
// @codeCoverageIgnoreEnd
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters the request.
|
||||
*
|
||||
* @param Request $request The request to filter
|
||||
*
|
||||
* @return Request
|
||||
*/
|
||||
protected function filterRequest(Request $request)
|
||||
{
|
||||
return $request;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters the Response.
|
||||
*
|
||||
* @param Response $response The Response to filter
|
||||
*
|
||||
* @return Response
|
||||
*/
|
||||
protected function filterResponse($response)
|
||||
{
|
||||
return $response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a crawler.
|
||||
*
|
||||
* This method returns null if the DomCrawler component is not available.
|
||||
*
|
||||
* @param string $uri A uri
|
||||
* @param string $content Content for the crawler to use
|
||||
* @param string $type Content type
|
||||
*
|
||||
* @return Crawler|null
|
||||
*/
|
||||
protected function createCrawlerFromContent($uri, $content, $type)
|
||||
{
|
||||
if (!class_exists('Symfony\Component\DomCrawler\Crawler')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
$crawler = new Crawler(null, $uri);
|
||||
$crawler->addContent($content, $type);
|
||||
|
||||
return $crawler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Goes back in the browser history.
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function back()
|
||||
{
|
||||
return $this->requestFromRequest($this->history->back(), false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Goes forward in the browser history.
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function forward()
|
||||
{
|
||||
return $this->requestFromRequest($this->history->forward(), false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reloads the current browser.
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function reload()
|
||||
{
|
||||
return $this->requestFromRequest($this->history->current(), false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Follow redirects?
|
||||
*
|
||||
* @return Crawler
|
||||
*
|
||||
* @throws \LogicException If request was not a redirect
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function followRedirect()
|
||||
{
|
||||
if (empty($this->redirect)) {
|
||||
throw new \LogicException('The request was not redirected.');
|
||||
}
|
||||
|
||||
return $this->request('get', $this->redirect);
|
||||
}
|
||||
|
||||
/**
|
||||
* Restarts the client.
|
||||
*
|
||||
* It flushes history and all cookies.
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function restart()
|
||||
{
|
||||
$this->cookieJar->clear();
|
||||
$this->history->clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a URI and converts it to absolute if it is not already absolute.
|
||||
*
|
||||
* @param string $uri A uri
|
||||
*
|
||||
* @return string An absolute uri
|
||||
*/
|
||||
protected function getAbsoluteUri($uri)
|
||||
{
|
||||
// already absolute?
|
||||
if (0 === strpos($uri, 'http')) {
|
||||
return $uri;
|
||||
}
|
||||
|
||||
if (!$this->history->isEmpty()) {
|
||||
$currentUri = $this->history->current()->getUri();
|
||||
} else {
|
||||
$currentUri = sprintf('http%s://%s/',
|
||||
isset($this->server['HTTPS']) ? 's' : '',
|
||||
isset($this->server['HTTP_HOST']) ? $this->server['HTTP_HOST'] : 'localhost'
|
||||
);
|
||||
}
|
||||
|
||||
// anchor?
|
||||
if (!$uri || '#' == $uri[0]) {
|
||||
return preg_replace('/#.*?$/', '', $currentUri).$uri;
|
||||
}
|
||||
|
||||
if ('/' !== $uri[0]) {
|
||||
$path = parse_url($currentUri, PHP_URL_PATH);
|
||||
|
||||
if ('/' !== substr($path, -1)) {
|
||||
$path = substr($path, 0, strrpos($path, '/') + 1);
|
||||
}
|
||||
|
||||
$uri = $path.$uri;
|
||||
}
|
||||
|
||||
return preg_replace('#^(.*?//[^/]+)\/.*$#', '$1', $currentUri).$uri;
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a request from a Request object directly.
|
||||
*
|
||||
* @param Request $request A Request instance
|
||||
* @param Boolean $changeHistory Whether to update the history or not (only used internally for back(), forward(), and reload())
|
||||
*
|
||||
* @return Crawler
|
||||
*/
|
||||
protected function requestFromRequest(Request $request, $changeHistory = true)
|
||||
{
|
||||
return $this->request($request->getMethod(), $request->getUri(), $request->getParameters(), $request->getFiles(), $request->getServer(), $request->getContent(), $changeHistory);
|
||||
}
|
||||
}
|
||||
1230
test/fixtures/php/Controller.php
vendored
Normal file
1230
test/fixtures/php/Controller.php
vendored
Normal file
File diff suppressed because it is too large
Load Diff
586
test/fixtures/php/Form.php
vendored
Normal file
586
test/fixtures/php/Form.php
vendored
Normal file
@@ -0,0 +1,586 @@
|
||||
<?php
|
||||
|
||||
/*
|
||||
* This file is part of the Symfony package.
|
||||
*
|
||||
* (c) Fabien Potencier <fabien@symfony.com>
|
||||
*
|
||||
* For the full copyright and license information, please view the LICENSE
|
||||
* file that was distributed with this source code.
|
||||
*/
|
||||
|
||||
namespace Symfony\Component\DomCrawler;
|
||||
|
||||
use Symfony\Component\DomCrawler\Field\FormField;
|
||||
|
||||
/**
|
||||
* Form represents an HTML form.
|
||||
*
|
||||
* @author Fabien Potencier <fabien@symfony.com>
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
class Form extends Link implements \ArrayAccess
|
||||
{
|
||||
/**
|
||||
* @var \DOMNode
|
||||
*/
|
||||
private $button;
|
||||
/**
|
||||
* @var Field\FormField[]
|
||||
*/
|
||||
private $fields;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param \DOMNode $node A \DOMNode instance
|
||||
* @param string $currentUri The URI of the page where the form is embedded
|
||||
* @param string $method The method to use for the link (if null, it defaults to the method defined by the form)
|
||||
*
|
||||
* @throws \LogicException if the node is not a button inside a form tag
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function __construct(\DOMNode $node, $currentUri, $method = null)
|
||||
{
|
||||
parent::__construct($node, $currentUri, $method);
|
||||
|
||||
$this->initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the form node associated with this form.
|
||||
*
|
||||
* @return \DOMNode A \DOMNode instance
|
||||
*/
|
||||
public function getFormNode()
|
||||
{
|
||||
return $this->node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the value of the fields.
|
||||
*
|
||||
* @param array $values An array of field values
|
||||
*
|
||||
* @return Form
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function setValues(array $values)
|
||||
{
|
||||
foreach ($values as $name => $value) {
|
||||
$this->fields->set($name, $value);
|
||||
}
|
||||
|
||||
return $this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the field values.
|
||||
*
|
||||
* The returned array does not include file fields (@see getFiles).
|
||||
*
|
||||
* @return array An array of field values.
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getValues()
|
||||
{
|
||||
$values = array();
|
||||
foreach ($this->fields->all() as $name => $field) {
|
||||
if ($field->isDisabled()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!$field instanceof Field\FileFormField && $field->hasValue()) {
|
||||
$values[$name] = $field->getValue();
|
||||
}
|
||||
}
|
||||
|
||||
return $values;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the file field values.
|
||||
*
|
||||
* @return array An array of file field values.
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getFiles()
|
||||
{
|
||||
if (!in_array($this->getMethod(), array('POST', 'PUT', 'DELETE', 'PATCH'))) {
|
||||
return array();
|
||||
}
|
||||
|
||||
$files = array();
|
||||
|
||||
foreach ($this->fields->all() as $name => $field) {
|
||||
if ($field->isDisabled()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if ($field instanceof Field\FileFormField) {
|
||||
$files[$name] = $field->getValue();
|
||||
}
|
||||
}
|
||||
|
||||
return $files;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the field values as PHP.
|
||||
*
|
||||
* This method converts fields with the array notation
|
||||
* (like foo[bar] to arrays) like PHP does.
|
||||
*
|
||||
* @return array An array of field values.
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getPhpValues()
|
||||
{
|
||||
$qs = http_build_query($this->getValues(), '', '&');
|
||||
parse_str($qs, $values);
|
||||
|
||||
return $values;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the file field values as PHP.
|
||||
*
|
||||
* This method converts fields with the array notation
|
||||
* (like foo[bar] to arrays) like PHP does.
|
||||
*
|
||||
* @return array An array of field values.
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getPhpFiles()
|
||||
{
|
||||
$qs = http_build_query($this->getFiles(), '', '&');
|
||||
parse_str($qs, $values);
|
||||
|
||||
return $values;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the URI of the form.
|
||||
*
|
||||
* The returned URI is not the same as the form "action" attribute.
|
||||
* This method merges the value if the method is GET to mimics
|
||||
* browser behavior.
|
||||
*
|
||||
* @return string The URI
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getUri()
|
||||
{
|
||||
$uri = parent::getUri();
|
||||
|
||||
if (!in_array($this->getMethod(), array('POST', 'PUT', 'DELETE', 'PATCH')) && $queryString = http_build_query($this->getValues(), null, '&')) {
|
||||
$sep = false === strpos($uri, '?') ? '?' : '&';
|
||||
$uri .= $sep.$queryString;
|
||||
}
|
||||
|
||||
return $uri;
|
||||
}
|
||||
|
||||
protected function getRawUri()
|
||||
{
|
||||
return $this->node->getAttribute('action');
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the form method.
|
||||
*
|
||||
* If no method is defined in the form, GET is returned.
|
||||
*
|
||||
* @return string The method
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function getMethod()
|
||||
{
|
||||
if (null !== $this->method) {
|
||||
return $this->method;
|
||||
}
|
||||
|
||||
return $this->node->getAttribute('method') ? strtoupper($this->node->getAttribute('method')) : 'GET';
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the named field exists.
|
||||
*
|
||||
* @param string $name The field name
|
||||
*
|
||||
* @return Boolean true if the field exists, false otherwise
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function has($name)
|
||||
{
|
||||
return $this->fields->has($name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a field from the form.
|
||||
*
|
||||
* @param string $name The field name
|
||||
*
|
||||
* @throws \InvalidArgumentException when the name is malformed
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function remove($name)
|
||||
{
|
||||
$this->fields->remove($name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a named field.
|
||||
*
|
||||
* @param string $name The field name
|
||||
*
|
||||
* @return FormField The field instance
|
||||
*
|
||||
* @throws \InvalidArgumentException When field is not present in this form
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function get($name)
|
||||
{
|
||||
return $this->fields->get($name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a named field.
|
||||
*
|
||||
* @param FormField $field The field
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function set(FormField $field)
|
||||
{
|
||||
$this->fields->add($field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all fields.
|
||||
*
|
||||
* @return array An array of fields
|
||||
*
|
||||
* @api
|
||||
*/
|
||||
public function all()
|
||||
{
|
||||
return $this->fields->all();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the named field exists.
|
||||
*
|
||||
* @param string $name The field name
|
||||
*
|
||||
* @return Boolean true if the field exists, false otherwise
|
||||
*/
|
||||
public function offsetExists($name)
|
||||
{
|
||||
return $this->has($name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the value of a field.
|
||||
*
|
||||
* @param string $name The field name
|
||||
*
|
||||
* @return FormField The associated Field instance
|
||||
*
|
||||
* @throws \InvalidArgumentException if the field does not exist
|
||||
*/
|
||||
public function offsetGet($name)
|
||||
{
|
||||
return $this->fields->get($name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the value of a field.
|
||||
*
|
||||
* @param string $name The field name
|
||||
* @param string|array $value The value of the field
|
||||
*
|
||||
* @throws \InvalidArgumentException if the field does not exist
|
||||
*/
|
||||
public function offsetSet($name, $value)
|
||||
{
|
||||
$this->fields->set($name, $value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a field from the form.
|
||||
*
|
||||
* @param string $name The field name
|
||||
*/
|
||||
public function offsetUnset($name)
|
||||
{
|
||||
$this->fields->remove($name);
|
||||
}
|
||||
|
||||
protected function setNode(\DOMNode $node)
|
||||
{
|
||||
$this->button = $node;
|
||||
if ('button' == $node->nodeName || ('input' == $node->nodeName && in_array($node->getAttribute('type'), array('submit', 'button', 'image')))) {
|
||||
do {
|
||||
// use the ancestor form element
|
||||
if (null === $node = $node->parentNode) {
|
||||
throw new \LogicException('The selected node does not have a form ancestor.');
|
||||
}
|
||||
} while ('form' != $node->nodeName);
|
||||
} elseif ('form' != $node->nodeName) {
|
||||
throw new \LogicException(sprintf('Unable to submit on a "%s" tag.', $node->nodeName));
|
||||
}
|
||||
|
||||
$this->node = $node;
|
||||
}
|
||||
|
||||
private function initialize()
|
||||
{
|
||||
$this->fields = new FormFieldRegistry();
|
||||
|
||||
$document = new \DOMDocument('1.0', 'UTF-8');
|
||||
$node = $document->importNode($this->node, true);
|
||||
$button = $document->importNode($this->button, true);
|
||||
$root = $document->appendChild($document->createElement('_root'));
|
||||
$root->appendChild($node);
|
||||
$root->appendChild($button);
|
||||
$xpath = new \DOMXPath($document);
|
||||
|
||||
foreach ($xpath->query('descendant::input | descendant::textarea | descendant::select', $root) as $node) {
|
||||
if (!$node->hasAttribute('name')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
$nodeName = $node->nodeName;
|
||||
|
||||
if ($node === $button) {
|
||||
$this->set(new Field\InputFormField($node));
|
||||
} elseif ('select' == $nodeName || 'input' == $nodeName && 'checkbox' == $node->getAttribute('type')) {
|
||||
$this->set(new Field\ChoiceFormField($node));
|
||||
} elseif ('input' == $nodeName && 'radio' == $node->getAttribute('type')) {
|
||||
if ($this->has($node->getAttribute('name'))) {
|
||||
$this->get($node->getAttribute('name'))->addChoice($node);
|
||||
} else {
|
||||
$this->set(new Field\ChoiceFormField($node));
|
||||
}
|
||||
} elseif ('input' == $nodeName && 'file' == $node->getAttribute('type')) {
|
||||
$this->set(new Field\FileFormField($node));
|
||||
} elseif ('input' == $nodeName && !in_array($node->getAttribute('type'), array('submit', 'button', 'image'))) {
|
||||
$this->set(new Field\InputFormField($node));
|
||||
} elseif ('textarea' == $nodeName) {
|
||||
$this->set(new Field\TextareaFormField($node));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FormFieldRegistry
|
||||
{
|
||||
private $fields = array();
|
||||
|
||||
private $base;
|
||||
|
||||
/**
|
||||
* Adds a field to the registry.
|
||||
*
|
||||
* @param FormField $field The field
|
||||
*
|
||||
* @throws \InvalidArgumentException when the name is malformed
|
||||
*/
|
||||
public function add(FormField $field)
|
||||
{
|
||||
$segments = $this->getSegments($field->getName());
|
||||
|
||||
$target =& $this->fields;
|
||||
while ($segments) {
|
||||
if (!is_array($target)) {
|
||||
$target = array();
|
||||
}
|
||||
$path = array_shift($segments);
|
||||
if ('' === $path) {
|
||||
$target =& $target[];
|
||||
} else {
|
||||
$target =& $target[$path];
|
||||
}
|
||||
}
|
||||
$target = $field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a field and its children from the registry.
|
||||
*
|
||||
* @param string $name The fully qualified name of the base field
|
||||
*
|
||||
* @throws \InvalidArgumentException when the name is malformed
|
||||
*/
|
||||
public function remove($name)
|
||||
{
|
||||
$segments = $this->getSegments($name);
|
||||
$target =& $this->fields;
|
||||
while (count($segments) > 1) {
|
||||
$path = array_shift($segments);
|
||||
if (!array_key_exists($path, $target)) {
|
||||
return;
|
||||
}
|
||||
$target =& $target[$path];
|
||||
}
|
||||
unset($target[array_shift($segments)]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the field and its children.
|
||||
*
|
||||
* @param string $name The fully qualified name of the field
|
||||
*
|
||||
* @return mixed The value of the field
|
||||
*
|
||||
* @throws \InvalidArgumentException when the name is malformed
|
||||
* @throws \InvalidArgumentException if the field does not exist
|
||||
*/
|
||||
public function &get($name)
|
||||
{
|
||||
$segments = $this->getSegments($name);
|
||||
$target =& $this->fields;
|
||||
while ($segments) {
|
||||
$path = array_shift($segments);
|
||||
if (!array_key_exists($path, $target)) {
|
||||
throw new \InvalidArgumentException(sprintf('Unreachable field "%s"', $path));
|
||||
}
|
||||
$target =& $target[$path];
|
||||
}
|
||||
|
||||
return $target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests whether the form has the given field.
|
||||
*
|
||||
* @param string $name The fully qualified name of the field
|
||||
*
|
||||
* @return Boolean Whether the form has the given field
|
||||
*/
|
||||
public function has($name)
|
||||
{
|
||||
try {
|
||||
$this->get($name);
|
||||
|
||||
return true;
|
||||
} catch (\InvalidArgumentException $e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of a field and its children.
|
||||
*
|
||||
* @param string $name The fully qualified name of the field
|
||||
* @param mixed $value The value
|
||||
*
|
||||
* @throws \InvalidArgumentException when the name is malformed
|
||||
* @throws \InvalidArgumentException if the field does not exist
|
||||
*/
|
||||
public function set($name, $value)
|
||||
{
|
||||
$target =& $this->get($name);
|
||||
if (is_array($value)) {
|
||||
$fields = self::create($name, $value);
|
||||
foreach ($fields->all() as $k => $v) {
|
||||
$this->set($k, $v);
|
||||
}
|
||||
} else {
|
||||
$target->setValue($value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of field with their value.
|
||||
*
|
||||
* @return array The list of fields as array((string) Fully qualified name => (mixed) value)
|
||||
*/
|
||||
public function all()
|
||||
{
|
||||
return $this->walk($this->fields, $this->base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an instance of the class.
|
||||
*
|
||||
* This function is made private because it allows overriding the $base and
|
||||
* the $values properties without any type checking.
|
||||
*
|
||||
* @param string $base The fully qualified name of the base field
|
||||
* @param array $values The values of the fields
|
||||
*
|
||||
* @return FormFieldRegistry
|
||||
*/
|
||||
static private function create($base, array $values)
|
||||
{
|
||||
$registry = new static();
|
||||
$registry->base = $base;
|
||||
$registry->fields = $values;
|
||||
|
||||
return $registry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms a PHP array in a list of fully qualified name / value.
|
||||
*
|
||||
* @param array $array The PHP array
|
||||
* @param string $base The name of the base field
|
||||
* @param array $output The initial values
|
||||
*
|
||||
* @return array The list of fields as array((string) Fully qualified name => (mixed) value)
|
||||
*/
|
||||
private function walk(array $array, $base = '', array &$output = array())
|
||||
{
|
||||
foreach ($array as $k => $v) {
|
||||
$path = empty($base) ? $k : sprintf("%s[%s]", $base, $k);
|
||||
if (is_array($v)) {
|
||||
$this->walk($v, $path, $output);
|
||||
} else {
|
||||
$output[$path] = $v;
|
||||
}
|
||||
}
|
||||
|
||||
return $output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a field name into segments as a web browser would do.
|
||||
*
|
||||
* <code>
|
||||
* getSegments('base[foo][3][]') = array('base', 'foo, '3', '');
|
||||
* </code>
|
||||
*
|
||||
* @param string $name The name of the field
|
||||
*
|
||||
* @return array The list of segments
|
||||
*
|
||||
* @throws \InvalidArgumentException when the name is malformed
|
||||
*/
|
||||
private function getSegments($name)
|
||||
{
|
||||
if (preg_match('/^(?P<base>[^[]+)(?P<extra>(\[.*)|$)/', $name, $m)) {
|
||||
$segments = array($m['base']);
|
||||
while (preg_match('/^\[(?P<segment>.*?)\](?P<extra>.*)$/', $m['extra'], $m)) {
|
||||
$segments[] = $m['segment'];
|
||||
}
|
||||
|
||||
return $segments;
|
||||
}
|
||||
|
||||
throw new \InvalidArgumentException(sprintf('Malformed field path "%s"', $name));
|
||||
}
|
||||
}
|
||||
3640
test/fixtures/php/Model.php
vendored
Normal file
3640
test/fixtures/php/Model.php
vendored
Normal file
File diff suppressed because it is too large
Load Diff
938
test/fixtures/python/django-models-base.py
vendored
Normal file
938
test/fixtures/python/django-models-base.py
vendored
Normal file
@@ -0,0 +1,938 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
import sys
|
||||
from functools import update_wrapper
|
||||
from future_builtins import zip
|
||||
|
||||
import django.db.models.manager # Imported to register signal handler.
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import (ObjectDoesNotExist,
|
||||
MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS)
|
||||
from django.core import validators
|
||||
from django.db.models.fields import AutoField, FieldDoesNotExist
|
||||
from django.db.models.fields.related import (ManyToOneRel,
|
||||
OneToOneField, add_lazy_relation)
|
||||
from django.db import (router, transaction, DatabaseError,
|
||||
DEFAULT_DB_ALIAS)
|
||||
from django.db.models.query import Q
|
||||
from django.db.models.query_utils import DeferredAttribute
|
||||
from django.db.models.deletion import Collector
|
||||
from django.db.models.options import Options
|
||||
from django.db.models import signals
|
||||
from django.db.models.loading import register_models, get_model
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.functional import curry
|
||||
from django.utils.encoding import smart_str, force_unicode
|
||||
from django.utils.text import get_text_list, capfirst
|
||||
|
||||
|
||||
class ModelBase(type):
|
||||
"""
|
||||
Metaclass for all models.
|
||||
"""
|
||||
def __new__(cls, name, bases, attrs):
|
||||
super_new = super(ModelBase, cls).__new__
|
||||
parents = [b for b in bases if isinstance(b, ModelBase)]
|
||||
if not parents:
|
||||
# If this isn't a subclass of Model, don't do anything special.
|
||||
return super_new(cls, name, bases, attrs)
|
||||
|
||||
# Create the class.
|
||||
module = attrs.pop('__module__')
|
||||
new_class = super_new(cls, name, bases, {'__module__': module})
|
||||
attr_meta = attrs.pop('Meta', None)
|
||||
abstract = getattr(attr_meta, 'abstract', False)
|
||||
if not attr_meta:
|
||||
meta = getattr(new_class, 'Meta', None)
|
||||
else:
|
||||
meta = attr_meta
|
||||
base_meta = getattr(new_class, '_meta', None)
|
||||
|
||||
if getattr(meta, 'app_label', None) is None:
|
||||
# Figure out the app_label by looking one level up.
|
||||
# For 'django.contrib.sites.models', this would be 'sites'.
|
||||
model_module = sys.modules[new_class.__module__]
|
||||
kwargs = {"app_label": model_module.__name__.split('.')[-2]}
|
||||
else:
|
||||
kwargs = {}
|
||||
|
||||
new_class.add_to_class('_meta', Options(meta, **kwargs))
|
||||
if not abstract:
|
||||
new_class.add_to_class('DoesNotExist', subclass_exception(b'DoesNotExist',
|
||||
tuple(x.DoesNotExist
|
||||
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
|
||||
or (ObjectDoesNotExist,), module))
|
||||
new_class.add_to_class('MultipleObjectsReturned', subclass_exception(b'MultipleObjectsReturned',
|
||||
tuple(x.MultipleObjectsReturned
|
||||
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
|
||||
or (MultipleObjectsReturned,), module))
|
||||
if base_meta and not base_meta.abstract:
|
||||
# Non-abstract child classes inherit some attributes from their
|
||||
# non-abstract parent (unless an ABC comes before it in the
|
||||
# method resolution order).
|
||||
if not hasattr(meta, 'ordering'):
|
||||
new_class._meta.ordering = base_meta.ordering
|
||||
if not hasattr(meta, 'get_latest_by'):
|
||||
new_class._meta.get_latest_by = base_meta.get_latest_by
|
||||
|
||||
is_proxy = new_class._meta.proxy
|
||||
|
||||
if getattr(new_class, '_default_manager', None):
|
||||
if not is_proxy:
|
||||
# Multi-table inheritance doesn't inherit default manager from
|
||||
# parents.
|
||||
new_class._default_manager = None
|
||||
new_class._base_manager = None
|
||||
else:
|
||||
# Proxy classes do inherit parent's default manager, if none is
|
||||
# set explicitly.
|
||||
new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
|
||||
new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
|
||||
|
||||
# Bail out early if we have already created this class.
|
||||
m = get_model(new_class._meta.app_label, name,
|
||||
seed_cache=False, only_installed=False)
|
||||
if m is not None:
|
||||
return m
|
||||
|
||||
# Add all attributes to the class.
|
||||
for obj_name, obj in attrs.items():
|
||||
new_class.add_to_class(obj_name, obj)
|
||||
|
||||
# All the fields of any type declared on this model
|
||||
new_fields = new_class._meta.local_fields + \
|
||||
new_class._meta.local_many_to_many + \
|
||||
new_class._meta.virtual_fields
|
||||
field_names = set([f.name for f in new_fields])
|
||||
|
||||
# Basic setup for proxy models.
|
||||
if is_proxy:
|
||||
base = None
|
||||
for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
|
||||
if parent._meta.abstract:
|
||||
if parent._meta.fields:
|
||||
raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
|
||||
else:
|
||||
continue
|
||||
if base is not None:
|
||||
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
|
||||
else:
|
||||
base = parent
|
||||
if base is None:
|
||||
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
|
||||
if (new_class._meta.local_fields or
|
||||
new_class._meta.local_many_to_many):
|
||||
raise FieldError("Proxy model '%s' contains model fields." % name)
|
||||
new_class._meta.setup_proxy(base)
|
||||
new_class._meta.concrete_model = base._meta.concrete_model
|
||||
else:
|
||||
new_class._meta.concrete_model = new_class
|
||||
|
||||
# Do the appropriate setup for any model parents.
|
||||
o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
|
||||
if isinstance(f, OneToOneField)])
|
||||
|
||||
for base in parents:
|
||||
original_base = base
|
||||
if not hasattr(base, '_meta'):
|
||||
# Things without _meta aren't functional models, so they're
|
||||
# uninteresting parents.
|
||||
continue
|
||||
|
||||
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
|
||||
# Check for clashes between locally declared fields and those
|
||||
# on the base classes (we cannot handle shadowed fields at the
|
||||
# moment).
|
||||
for field in parent_fields:
|
||||
if field.name in field_names:
|
||||
raise FieldError('Local field %r in class %r clashes '
|
||||
'with field of similar name from '
|
||||
'base class %r' %
|
||||
(field.name, name, base.__name__))
|
||||
if not base._meta.abstract:
|
||||
# Concrete classes...
|
||||
base = base._meta.concrete_model
|
||||
if base in o2o_map:
|
||||
field = o2o_map[base]
|
||||
elif not is_proxy:
|
||||
attr_name = '%s_ptr' % base._meta.module_name
|
||||
field = OneToOneField(base, name=attr_name,
|
||||
auto_created=True, parent_link=True)
|
||||
new_class.add_to_class(attr_name, field)
|
||||
else:
|
||||
field = None
|
||||
new_class._meta.parents[base] = field
|
||||
else:
|
||||
# .. and abstract ones.
|
||||
for field in parent_fields:
|
||||
new_class.add_to_class(field.name, copy.deepcopy(field))
|
||||
|
||||
# Pass any non-abstract parent classes onto child.
|
||||
new_class._meta.parents.update(base._meta.parents)
|
||||
|
||||
# Inherit managers from the abstract base classes.
|
||||
new_class.copy_managers(base._meta.abstract_managers)
|
||||
|
||||
# Proxy models inherit the non-abstract managers from their base,
|
||||
# unless they have redefined any of them.
|
||||
if is_proxy:
|
||||
new_class.copy_managers(original_base._meta.concrete_managers)
|
||||
|
||||
# Inherit virtual fields (like GenericForeignKey) from the parent
|
||||
# class
|
||||
for field in base._meta.virtual_fields:
|
||||
if base._meta.abstract and field.name in field_names:
|
||||
raise FieldError('Local field %r in class %r clashes '\
|
||||
'with field of similar name from '\
|
||||
'abstract base class %r' % \
|
||||
(field.name, name, base.__name__))
|
||||
new_class.add_to_class(field.name, copy.deepcopy(field))
|
||||
|
||||
if abstract:
|
||||
# Abstract base models can't be instantiated and don't appear in
|
||||
# the list of models for an app. We do the final setup for them a
|
||||
# little differently from normal models.
|
||||
attr_meta.abstract = False
|
||||
new_class.Meta = attr_meta
|
||||
return new_class
|
||||
|
||||
new_class._prepare()
|
||||
register_models(new_class._meta.app_label, new_class)
|
||||
|
||||
# Because of the way imports happen (recursively), we may or may not be
|
||||
# the first time this model tries to register with the framework. There
|
||||
# should only be one class for each model, so we always return the
|
||||
# registered version.
|
||||
return get_model(new_class._meta.app_label, name,
|
||||
seed_cache=False, only_installed=False)
|
||||
|
||||
def copy_managers(cls, base_managers):
|
||||
# This is in-place sorting of an Options attribute, but that's fine.
|
||||
base_managers.sort()
|
||||
for _, mgr_name, manager in base_managers:
|
||||
val = getattr(cls, mgr_name, None)
|
||||
if not val or val is manager:
|
||||
new_manager = manager._copy_to_model(cls)
|
||||
cls.add_to_class(mgr_name, new_manager)
|
||||
|
||||
def add_to_class(cls, name, value):
|
||||
if hasattr(value, 'contribute_to_class'):
|
||||
value.contribute_to_class(cls, name)
|
||||
else:
|
||||
setattr(cls, name, value)
|
||||
|
||||
def _prepare(cls):
|
||||
"""
|
||||
Creates some methods once self._meta has been populated.
|
||||
"""
|
||||
opts = cls._meta
|
||||
opts._prepare(cls)
|
||||
|
||||
if opts.order_with_respect_to:
|
||||
cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
|
||||
cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
|
||||
# defer creating accessors on the foreign class until we are
|
||||
# certain it has been created
|
||||
def make_foreign_order_accessors(field, model, cls):
|
||||
setattr(
|
||||
field.rel.to,
|
||||
'get_%s_order' % cls.__name__.lower(),
|
||||
curry(method_get_order, cls)
|
||||
)
|
||||
setattr(
|
||||
field.rel.to,
|
||||
'set_%s_order' % cls.__name__.lower(),
|
||||
curry(method_set_order, cls)
|
||||
)
|
||||
add_lazy_relation(
|
||||
cls,
|
||||
opts.order_with_respect_to,
|
||||
opts.order_with_respect_to.rel.to,
|
||||
make_foreign_order_accessors
|
||||
)
|
||||
|
||||
# Give the class a docstring -- its definition.
|
||||
if cls.__doc__ is None:
|
||||
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))
|
||||
|
||||
if hasattr(cls, 'get_absolute_url'):
|
||||
cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),
|
||||
cls.get_absolute_url)
|
||||
|
||||
signals.class_prepared.send(sender=cls)
|
||||
|
||||
class ModelState(object):
|
||||
"""
|
||||
A class for storing instance state
|
||||
"""
|
||||
def __init__(self, db=None):
|
||||
self.db = db
|
||||
# If true, uniqueness validation checks will consider this a new, as-yet-unsaved object.
|
||||
# Necessary for correct validation of new instances of objects with explicit (non-auto) PKs.
|
||||
# This impacts validation only; it has no effect on the actual save.
|
||||
self.adding = True
|
||||
|
||||
class Model(object):
|
||||
__metaclass__ = ModelBase
|
||||
_deferred = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
|
||||
|
||||
# Set up the storage for instance state
|
||||
self._state = ModelState()
|
||||
|
||||
# There is a rather weird disparity here; if kwargs, it's set, then args
|
||||
# overrides it. It should be one or the other; don't duplicate the work
|
||||
# The reason for the kwargs check is that standard iterator passes in by
|
||||
# args, and instantiation for iteration is 33% faster.
|
||||
args_len = len(args)
|
||||
if args_len > len(self._meta.fields):
|
||||
# Daft, but matches old exception sans the err msg.
|
||||
raise IndexError("Number of args exceeds number of fields")
|
||||
|
||||
fields_iter = iter(self._meta.fields)
|
||||
if not kwargs:
|
||||
# The ordering of the zip calls matter - zip throws StopIteration
|
||||
# when an iter throws it. So if the first iter throws it, the second
|
||||
# is *not* consumed. We rely on this, so don't change the order
|
||||
# without changing the logic.
|
||||
for val, field in zip(args, fields_iter):
|
||||
setattr(self, field.attname, val)
|
||||
else:
|
||||
# Slower, kwargs-ready version.
|
||||
for val, field in zip(args, fields_iter):
|
||||
setattr(self, field.attname, val)
|
||||
kwargs.pop(field.name, None)
|
||||
# Maintain compatibility with existing calls.
|
||||
if isinstance(field.rel, ManyToOneRel):
|
||||
kwargs.pop(field.attname, None)
|
||||
|
||||
# Now we're left with the unprocessed fields that *must* come from
|
||||
# keywords, or default.
|
||||
|
||||
for field in fields_iter:
|
||||
is_related_object = False
|
||||
# This slightly odd construct is so that we can access any
|
||||
# data-descriptor object (DeferredAttribute) without triggering its
|
||||
# __get__ method.
|
||||
if (field.attname not in kwargs and
|
||||
isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)):
|
||||
# This field will be populated on request.
|
||||
continue
|
||||
if kwargs:
|
||||
if isinstance(field.rel, ManyToOneRel):
|
||||
try:
|
||||
# Assume object instance was passed in.
|
||||
rel_obj = kwargs.pop(field.name)
|
||||
is_related_object = True
|
||||
except KeyError:
|
||||
try:
|
||||
# Object instance wasn't passed in -- must be an ID.
|
||||
val = kwargs.pop(field.attname)
|
||||
except KeyError:
|
||||
val = field.get_default()
|
||||
else:
|
||||
# Object instance was passed in. Special case: You can
|
||||
# pass in "None" for related objects if it's allowed.
|
||||
if rel_obj is None and field.null:
|
||||
val = None
|
||||
else:
|
||||
try:
|
||||
val = kwargs.pop(field.attname)
|
||||
except KeyError:
|
||||
# This is done with an exception rather than the
|
||||
# default argument on pop because we don't want
|
||||
# get_default() to be evaluated, and then not used.
|
||||
# Refs #12057.
|
||||
val = field.get_default()
|
||||
else:
|
||||
val = field.get_default()
|
||||
if is_related_object:
|
||||
# If we are passed a related instance, set it using the
|
||||
# field.name instead of field.attname (e.g. "user" instead of
|
||||
# "user_id") so that the object gets properly cached (and type
|
||||
# checked) by the RelatedObjectDescriptor.
|
||||
setattr(self, field.name, rel_obj)
|
||||
else:
|
||||
setattr(self, field.attname, val)
|
||||
|
||||
if kwargs:
|
||||
for prop in kwargs.keys():
|
||||
try:
|
||||
if isinstance(getattr(self.__class__, prop), property):
|
||||
setattr(self, prop, kwargs.pop(prop))
|
||||
except AttributeError:
|
||||
pass
|
||||
if kwargs:
|
||||
raise TypeError("'%s' is an invalid keyword argument for this function" % kwargs.keys()[0])
|
||||
super(Model, self).__init__()
|
||||
signals.post_init.send(sender=self.__class__, instance=self)
|
||||
|
||||
def __repr__(self):
|
||||
try:
|
||||
u = unicode(self)
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
u = '[Bad Unicode data]'
|
||||
return smart_str('<%s: %s>' % (self.__class__.__name__, u))
|
||||
|
||||
def __str__(self):
|
||||
if hasattr(self, '__unicode__'):
|
||||
return force_unicode(self).encode('utf-8')
|
||||
return '%s object' % self.__class__.__name__
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, self.__class__) and self._get_pk_val() == other._get_pk_val()
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._get_pk_val())
|
||||
|
||||
def __reduce__(self):
|
||||
"""
|
||||
Provides pickling support. Normally, this just dispatches to Python's
|
||||
standard handling. However, for models with deferred field loading, we
|
||||
need to do things manually, as they're dynamically created classes and
|
||||
only module-level classes can be pickled by the default path.
|
||||
"""
|
||||
data = self.__dict__
|
||||
model = self.__class__
|
||||
# The obvious thing to do here is to invoke super().__reduce__()
|
||||
# for the non-deferred case. Don't do that.
|
||||
# On Python 2.4, there is something weird with __reduce__,
|
||||
# and as a result, the super call will cause an infinite recursion.
|
||||
# See #10547 and #12121.
|
||||
defers = []
|
||||
if self._deferred:
|
||||
from django.db.models.query_utils import deferred_class_factory
|
||||
factory = deferred_class_factory
|
||||
for field in self._meta.fields:
|
||||
if isinstance(self.__class__.__dict__.get(field.attname),
|
||||
DeferredAttribute):
|
||||
defers.append(field.attname)
|
||||
model = self._meta.proxy_for_model
|
||||
else:
|
||||
factory = simple_class_factory
|
||||
return (model_unpickle, (model, defers, factory), data)
|
||||
|
||||
def _get_pk_val(self, meta=None):
|
||||
if not meta:
|
||||
meta = self._meta
|
||||
return getattr(self, meta.pk.attname)
|
||||
|
||||
def _set_pk_val(self, value):
|
||||
return setattr(self, self._meta.pk.attname, value)
|
||||
|
||||
pk = property(_get_pk_val, _set_pk_val)
|
||||
|
||||
def serializable_value(self, field_name):
|
||||
"""
|
||||
Returns the value of the field name for this instance. If the field is
|
||||
a foreign key, returns the id value, instead of the object. If there's
|
||||
no Field object with this name on the model, the model attribute's
|
||||
value is returned directly.
|
||||
|
||||
Used to serialize a field's value (in the serializer, or form output,
|
||||
for example). Normally, you would just access the attribute directly
|
||||
and not use this method.
|
||||
"""
|
||||
try:
|
||||
field = self._meta.get_field_by_name(field_name)[0]
|
||||
except FieldDoesNotExist:
|
||||
return getattr(self, field_name)
|
||||
return getattr(self, field.attname)
|
||||
|
||||
def save(self, force_insert=False, force_update=False, using=None,
|
||||
update_fields=None):
|
||||
"""
|
||||
Saves the current instance. Override this in a subclass if you want to
|
||||
control the saving process.
|
||||
|
||||
The 'force_insert' and 'force_update' parameters can be used to insist
|
||||
that the "save" must be an SQL insert or update (or equivalent for
|
||||
non-SQL backends), respectively. Normally, they should not be set.
|
||||
"""
|
||||
if force_insert and (force_update or update_fields):
|
||||
raise ValueError("Cannot force both insert and updating in model saving.")
|
||||
|
||||
if update_fields is not None:
|
||||
# If update_fields is empty, skip the save. We do also check for
|
||||
# no-op saves later on for inheritance cases. This bailout is
|
||||
# still needed for skipping signal sending.
|
||||
if len(update_fields) == 0:
|
||||
return
|
||||
|
||||
update_fields = frozenset(update_fields)
|
||||
field_names = set([field.name for field in self._meta.fields
|
||||
if not field.primary_key])
|
||||
non_model_fields = update_fields.difference(field_names)
|
||||
|
||||
if non_model_fields:
|
||||
raise ValueError("The following fields do not exist in this "
|
||||
"model or are m2m fields: %s"
|
||||
% ', '.join(non_model_fields))
|
||||
|
||||
self.save_base(using=using, force_insert=force_insert,
|
||||
force_update=force_update, update_fields=update_fields)
|
||||
save.alters_data = True
|
||||
|
||||
def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
|
||||
force_update=False, using=None, update_fields=None):
|
||||
"""
|
||||
Does the heavy-lifting involved in saving. Subclasses shouldn't need to
|
||||
override this method. It's separate from save() in order to hide the
|
||||
need for overrides of save() to pass around internal-only parameters
|
||||
('raw', 'cls', and 'origin').
|
||||
"""
|
||||
using = using or router.db_for_write(self.__class__, instance=self)
|
||||
assert not (force_insert and (force_update or update_fields))
|
||||
assert update_fields is None or len(update_fields) > 0
|
||||
if cls is None:
|
||||
cls = self.__class__
|
||||
meta = cls._meta
|
||||
if not meta.proxy:
|
||||
origin = cls
|
||||
else:
|
||||
meta = cls._meta
|
||||
|
||||
if origin and not meta.auto_created:
|
||||
signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
|
||||
update_fields=update_fields)
|
||||
|
||||
# If we are in a raw save, save the object exactly as presented.
|
||||
# That means that we don't try to be smart about saving attributes
|
||||
# that might have come from the parent class - we just save the
|
||||
# attributes we have been given to the class we have been given.
|
||||
# We also go through this process to defer the save of proxy objects
|
||||
# to their actual underlying model.
|
||||
if not raw or meta.proxy:
|
||||
if meta.proxy:
|
||||
org = cls
|
||||
else:
|
||||
org = None
|
||||
for parent, field in meta.parents.items():
|
||||
# At this point, parent's primary key field may be unknown
|
||||
# (for example, from administration form which doesn't fill
|
||||
# this field). If so, fill it.
|
||||
if field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None:
|
||||
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
|
||||
|
||||
self.save_base(cls=parent, origin=org, using=using,
|
||||
update_fields=update_fields)
|
||||
|
||||
if field:
|
||||
setattr(self, field.attname, self._get_pk_val(parent._meta))
|
||||
if meta.proxy:
|
||||
return
|
||||
|
||||
if not meta.proxy:
|
||||
non_pks = [f for f in meta.local_fields if not f.primary_key]
|
||||
|
||||
if update_fields:
|
||||
non_pks = [f for f in non_pks if f.name in update_fields]
|
||||
|
||||
# First, try an UPDATE. If that doesn't update anything, do an INSERT.
|
||||
pk_val = self._get_pk_val(meta)
|
||||
pk_set = pk_val is not None
|
||||
record_exists = True
|
||||
manager = cls._base_manager
|
||||
if pk_set:
|
||||
# Determine if we should do an update (pk already exists, forced update,
|
||||
# no force_insert)
|
||||
if ((force_update or update_fields) or (not force_insert and
|
||||
manager.using(using).filter(pk=pk_val).exists())):
|
||||
if force_update or non_pks:
|
||||
values = [(f, None, (raw and getattr(self, f.attname) or f.pre_save(self, False))) for f in non_pks]
|
||||
if values:
|
||||
rows = manager.using(using).filter(pk=pk_val)._update(values)
|
||||
if force_update and not rows:
|
||||
raise DatabaseError("Forced update did not affect any rows.")
|
||||
if update_fields and not rows:
|
||||
raise DatabaseError("Save with update_fields did not affect any rows.")
|
||||
else:
|
||||
record_exists = False
|
||||
if not pk_set or not record_exists:
|
||||
if meta.order_with_respect_to:
|
||||
# If this is a model with an order_with_respect_to
|
||||
# autopopulate the _order field
|
||||
field = meta.order_with_respect_to
|
||||
order_value = manager.using(using).filter(**{field.name: getattr(self, field.attname)}).count()
|
||||
self._order = order_value
|
||||
|
||||
fields = meta.local_fields
|
||||
if not pk_set:
|
||||
if force_update or update_fields:
|
||||
raise ValueError("Cannot force an update in save() with no primary key.")
|
||||
fields = [f for f in fields if not isinstance(f, AutoField)]
|
||||
|
||||
record_exists = False
|
||||
|
||||
update_pk = bool(meta.has_auto_field and not pk_set)
|
||||
result = manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw)
|
||||
|
||||
if update_pk:
|
||||
setattr(self, meta.pk.attname, result)
|
||||
transaction.commit_unless_managed(using=using)
|
||||
|
||||
# Store the database on which the object was saved
|
||||
self._state.db = using
|
||||
# Once saved, this is no longer a to-be-added instance.
|
||||
self._state.adding = False
|
||||
|
||||
# Signal that the save is complete
|
||||
if origin and not meta.auto_created:
|
||||
signals.post_save.send(sender=origin, instance=self, created=(not record_exists),
|
||||
update_fields=update_fields, raw=raw, using=using)
|
||||
|
||||
|
||||
save_base.alters_data = True
|
||||
|
||||
def delete(self, using=None):
|
||||
using = using or router.db_for_write(self.__class__, instance=self)
|
||||
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
|
||||
|
||||
collector = Collector(using=using)
|
||||
collector.collect([self])
|
||||
collector.delete()
|
||||
|
||||
delete.alters_data = True
|
||||
|
||||
def _get_FIELD_display(self, field):
|
||||
value = getattr(self, field.attname)
|
||||
return force_unicode(dict(field.flatchoices).get(value, value), strings_only=True)
|
||||
|
||||
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
|
||||
if not self.pk:
|
||||
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
|
||||
op = is_next and 'gt' or 'lt'
|
||||
order = not is_next and '-' or ''
|
||||
param = smart_str(getattr(self, field.attname))
|
||||
q = Q(**{'%s__%s' % (field.name, op): param})
|
||||
q = q|Q(**{field.name: param, 'pk__%s' % op: self.pk})
|
||||
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
|
||||
try:
|
||||
return qs[0]
|
||||
except IndexError:
|
||||
raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
|
||||
|
||||
def _get_next_or_previous_in_order(self, is_next):
|
||||
cachename = "__%s_order_cache" % is_next
|
||||
if not hasattr(self, cachename):
|
||||
op = is_next and 'gt' or 'lt'
|
||||
order = not is_next and '-_order' or '_order'
|
||||
order_field = self._meta.order_with_respect_to
|
||||
obj = self._default_manager.filter(**{
|
||||
order_field.name: getattr(self, order_field.attname)
|
||||
}).filter(**{
|
||||
'_order__%s' % op: self._default_manager.values('_order').filter(**{
|
||||
self._meta.pk.name: self.pk
|
||||
})
|
||||
}).order_by(order)[:1].get()
|
||||
setattr(self, cachename, obj)
|
||||
return getattr(self, cachename)
|
||||
|
||||
def prepare_database_save(self, unused):
|
||||
return self.pk
|
||||
|
||||
def clean(self):
|
||||
"""
|
||||
Hook for doing any extra model-wide validation after clean() has been
|
||||
called on every field by self.clean_fields. Any ValidationError raised
|
||||
by this method will not be associated with a particular field; it will
|
||||
have a special-case association with the field defined by NON_FIELD_ERRORS.
|
||||
"""
|
||||
pass
|
||||
|
||||
def validate_unique(self, exclude=None):
|
||||
"""
|
||||
Checks unique constraints on the model and raises ``ValidationError``
|
||||
if any failed.
|
||||
"""
|
||||
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
|
||||
|
||||
errors = self._perform_unique_checks(unique_checks)
|
||||
date_errors = self._perform_date_checks(date_checks)
|
||||
|
||||
for k, v in date_errors.items():
|
||||
errors.setdefault(k, []).extend(v)
|
||||
|
||||
if errors:
|
||||
raise ValidationError(errors)
|
||||
|
||||
def _get_unique_checks(self, exclude=None):
|
||||
"""
|
||||
Gather a list of checks to perform. Since validate_unique could be
|
||||
called from a ModelForm, some fields may have been excluded; we can't
|
||||
perform a unique check on a model that is missing fields involved
|
||||
in that check.
|
||||
Fields that did not validate should also be excluded, but they need
|
||||
to be passed in via the exclude argument.
|
||||
"""
|
||||
if exclude is None:
|
||||
exclude = []
|
||||
unique_checks = []
|
||||
|
||||
unique_togethers = [(self.__class__, self._meta.unique_together)]
|
||||
for parent_class in self._meta.parents.keys():
|
||||
if parent_class._meta.unique_together:
|
||||
unique_togethers.append((parent_class, parent_class._meta.unique_together))
|
||||
|
||||
for model_class, unique_together in unique_togethers:
|
||||
for check in unique_together:
|
||||
for name in check:
|
||||
# If this is an excluded field, don't add this check.
|
||||
if name in exclude:
|
||||
break
|
||||
else:
|
||||
unique_checks.append((model_class, tuple(check)))
|
||||
|
||||
# These are checks for the unique_for_<date/year/month>.
|
||||
date_checks = []
|
||||
|
||||
# Gather a list of checks for fields declared as unique and add them to
|
||||
# the list of checks.
|
||||
|
||||
fields_with_class = [(self.__class__, self._meta.local_fields)]
|
||||
for parent_class in self._meta.parents.keys():
|
||||
fields_with_class.append((parent_class, parent_class._meta.local_fields))
|
||||
|
||||
for model_class, fields in fields_with_class:
|
||||
for f in fields:
|
||||
name = f.name
|
||||
if name in exclude:
|
||||
continue
|
||||
if f.unique:
|
||||
unique_checks.append((model_class, (name,)))
|
||||
if f.unique_for_date and f.unique_for_date not in exclude:
|
||||
date_checks.append((model_class, 'date', name, f.unique_for_date))
|
||||
if f.unique_for_year and f.unique_for_year not in exclude:
|
||||
date_checks.append((model_class, 'year', name, f.unique_for_year))
|
||||
if f.unique_for_month and f.unique_for_month not in exclude:
|
||||
date_checks.append((model_class, 'month', name, f.unique_for_month))
|
||||
return unique_checks, date_checks
|
||||
|
||||
def _perform_unique_checks(self, unique_checks):
|
||||
errors = {}
|
||||
|
||||
for model_class, unique_check in unique_checks:
|
||||
# Try to look up an existing object with the same values as this
|
||||
# object's values for all the unique field.
|
||||
|
||||
lookup_kwargs = {}
|
||||
for field_name in unique_check:
|
||||
f = self._meta.get_field(field_name)
|
||||
lookup_value = getattr(self, f.attname)
|
||||
if lookup_value is None:
|
||||
# no value, skip the lookup
|
||||
continue
|
||||
if f.primary_key and not self._state.adding:
|
||||
# no need to check for unique primary key when editing
|
||||
continue
|
||||
lookup_kwargs[str(field_name)] = lookup_value
|
||||
|
||||
# some fields were skipped, no reason to do the check
|
||||
if len(unique_check) != len(lookup_kwargs.keys()):
|
||||
continue
|
||||
|
||||
qs = model_class._default_manager.filter(**lookup_kwargs)
|
||||
|
||||
# Exclude the current object from the query if we are editing an
|
||||
# instance (as opposed to creating a new one)
|
||||
# Note that we need to use the pk as defined by model_class, not
|
||||
# self.pk. These can be different fields because model inheritance
|
||||
# allows single model to have effectively multiple primary keys.
|
||||
# Refs #17615.
|
||||
model_class_pk = self._get_pk_val(model_class._meta)
|
||||
if not self._state.adding and model_class_pk is not None:
|
||||
qs = qs.exclude(pk=model_class_pk)
|
||||
if qs.exists():
|
||||
if len(unique_check) == 1:
|
||||
key = unique_check[0]
|
||||
else:
|
||||
key = NON_FIELD_ERRORS
|
||||
errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
|
||||
|
||||
return errors
|
||||
|
||||
def _perform_date_checks(self, date_checks):
|
||||
errors = {}
|
||||
for model_class, lookup_type, field, unique_for in date_checks:
|
||||
lookup_kwargs = {}
|
||||
# there's a ticket to add a date lookup, we can remove this special
|
||||
# case if that makes it's way in
|
||||
date = getattr(self, unique_for)
|
||||
if date is None:
|
||||
continue
|
||||
if lookup_type == 'date':
|
||||
lookup_kwargs['%s__day' % unique_for] = date.day
|
||||
lookup_kwargs['%s__month' % unique_for] = date.month
|
||||
lookup_kwargs['%s__year' % unique_for] = date.year
|
||||
else:
|
||||
lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
|
||||
lookup_kwargs[field] = getattr(self, field)
|
||||
|
||||
qs = model_class._default_manager.filter(**lookup_kwargs)
|
||||
# Exclude the current object from the query if we are editing an
|
||||
# instance (as opposed to creating a new one)
|
||||
if not self._state.adding and self.pk is not None:
|
||||
qs = qs.exclude(pk=self.pk)
|
||||
|
||||
if qs.exists():
|
||||
errors.setdefault(field, []).append(
|
||||
self.date_error_message(lookup_type, field, unique_for)
|
||||
)
|
||||
return errors
|
||||
|
||||
def date_error_message(self, lookup_type, field, unique_for):
|
||||
opts = self._meta
|
||||
return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
|
||||
'field_name': unicode(capfirst(opts.get_field(field).verbose_name)),
|
||||
'date_field': unicode(capfirst(opts.get_field(unique_for).verbose_name)),
|
||||
'lookup': lookup_type,
|
||||
}
|
||||
|
||||
def unique_error_message(self, model_class, unique_check):
|
||||
opts = model_class._meta
|
||||
model_name = capfirst(opts.verbose_name)
|
||||
|
||||
# A unique field
|
||||
if len(unique_check) == 1:
|
||||
field_name = unique_check[0]
|
||||
field = opts.get_field(field_name)
|
||||
field_label = capfirst(field.verbose_name)
|
||||
# Insert the error into the error dict, very sneaky
|
||||
return field.error_messages['unique'] % {
|
||||
'model_name': unicode(model_name),
|
||||
'field_label': unicode(field_label)
|
||||
}
|
||||
# unique_together
|
||||
else:
|
||||
field_labels = map(lambda f: capfirst(opts.get_field(f).verbose_name), unique_check)
|
||||
field_labels = get_text_list(field_labels, _('and'))
|
||||
return _("%(model_name)s with this %(field_label)s already exists.") % {
|
||||
'model_name': unicode(model_name),
|
||||
'field_label': unicode(field_labels)
|
||||
}
|
||||
|
||||
def full_clean(self, exclude=None):
|
||||
"""
|
||||
Calls clean_fields, clean, and validate_unique, on the model,
|
||||
and raises a ``ValidationError`` for any errors that occured.
|
||||
"""
|
||||
errors = {}
|
||||
if exclude is None:
|
||||
exclude = []
|
||||
|
||||
try:
|
||||
self.clean_fields(exclude=exclude)
|
||||
except ValidationError as e:
|
||||
errors = e.update_error_dict(errors)
|
||||
|
||||
# Form.clean() is run even if other validation fails, so do the
|
||||
# same with Model.clean() for consistency.
|
||||
try:
|
||||
self.clean()
|
||||
except ValidationError as e:
|
||||
errors = e.update_error_dict(errors)
|
||||
|
||||
# Run unique checks, but only for fields that passed validation.
|
||||
for name in errors.keys():
|
||||
if name != NON_FIELD_ERRORS and name not in exclude:
|
||||
exclude.append(name)
|
||||
try:
|
||||
self.validate_unique(exclude=exclude)
|
||||
except ValidationError as e:
|
||||
errors = e.update_error_dict(errors)
|
||||
|
||||
if errors:
|
||||
raise ValidationError(errors)
|
||||
|
||||
def clean_fields(self, exclude=None):
|
||||
"""
|
||||
Cleans all fields and raises a ValidationError containing message_dict
|
||||
of all validation errors if any occur.
|
||||
"""
|
||||
if exclude is None:
|
||||
exclude = []
|
||||
|
||||
errors = {}
|
||||
for f in self._meta.fields:
|
||||
if f.name in exclude:
|
||||
continue
|
||||
# Skip validation for empty fields with blank=True. The developer
|
||||
# is responsible for making sure they have a valid value.
|
||||
raw_value = getattr(self, f.attname)
|
||||
if f.blank and raw_value in validators.EMPTY_VALUES:
|
||||
continue
|
||||
try:
|
||||
setattr(self, f.attname, f.clean(raw_value, self))
|
||||
except ValidationError as e:
|
||||
errors[f.name] = e.messages
|
||||
|
||||
if errors:
|
||||
raise ValidationError(errors)
|
||||
|
||||
|
||||
############################################
|
||||
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
|
||||
############################################
|
||||
|
||||
# ORDERING METHODS #########################
|
||||
|
||||
def method_set_order(ordered_obj, self, id_list, using=None):
|
||||
if using is None:
|
||||
using = DEFAULT_DB_ALIAS
|
||||
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
|
||||
order_name = ordered_obj._meta.order_with_respect_to.name
|
||||
# FIXME: It would be nice if there was an "update many" version of update
|
||||
# for situations like this.
|
||||
for i, j in enumerate(id_list):
|
||||
ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
|
||||
transaction.commit_unless_managed(using=using)
|
||||
|
||||
|
||||
def method_get_order(ordered_obj, self):
|
||||
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
|
||||
order_name = ordered_obj._meta.order_with_respect_to.name
|
||||
pk_name = ordered_obj._meta.pk.name
|
||||
return [r[pk_name] for r in
|
||||
ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)]
|
||||
|
||||
|
||||
##############################################
|
||||
# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
|
||||
##############################################
|
||||
|
||||
def get_absolute_url(opts, func, self, *args, **kwargs):
|
||||
return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.module_name), func)(self, *args, **kwargs)
|
||||
|
||||
|
||||
########
|
||||
# MISC #
|
||||
########
|
||||
|
||||
class Empty(object):
|
||||
pass
|
||||
|
||||
def simple_class_factory(model, attrs):
|
||||
"""Used to unpickle Models without deferred fields.
|
||||
|
||||
We need to do this the hard way, rather than just using
|
||||
the default __reduce__ implementation, because of a
|
||||
__deepcopy__ problem in Python 2.4
|
||||
"""
|
||||
return model
|
||||
|
||||
def model_unpickle(model, attrs, factory):
|
||||
"""
|
||||
Used to unpickle Model subclasses with deferred fields.
|
||||
"""
|
||||
cls = factory(model, attrs)
|
||||
return cls.__new__(cls)
|
||||
model_unpickle.__safe_for_unpickle__ = True
|
||||
|
||||
def subclass_exception(name, parents, module):
|
||||
return type(name, parents, {'__module__': module})
|
||||
150
test/fixtures/python/flask-view.py
vendored
Normal file
150
test/fixtures/python/flask-view.py
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.views
|
||||
~~~~~~~~~~~
|
||||
|
||||
This module provides class-based views inspired by the ones in Django.
|
||||
|
||||
:copyright: (c) 2011 by Armin Ronacher.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
from .globals import request
|
||||
|
||||
|
||||
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
|
||||
'delete', 'put', 'trace', 'patch'])
|
||||
|
||||
|
||||
class View(object):
|
||||
"""Alternative way to use view functions. A subclass has to implement
|
||||
:meth:`dispatch_request` which is called with the view arguments from
|
||||
the URL routing system. If :attr:`methods` is provided the methods
|
||||
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
|
||||
method explicitly::
|
||||
|
||||
class MyView(View):
|
||||
methods = ['GET']
|
||||
|
||||
def dispatch_request(self, name):
|
||||
return 'Hello %s!' % name
|
||||
|
||||
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
|
||||
|
||||
When you want to decorate a pluggable view you will have to either do that
|
||||
when the view function is created (by wrapping the return value of
|
||||
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
|
||||
|
||||
class SecretView(View):
|
||||
methods = ['GET']
|
||||
decorators = [superuser_required]
|
||||
|
||||
def dispatch_request(self):
|
||||
...
|
||||
|
||||
The decorators stored in the decorators list are applied one after another
|
||||
when the view function is created. Note that you can *not* use the class
|
||||
based decorators since those would decorate the view class and not the
|
||||
generated view function!
|
||||
"""
|
||||
|
||||
#: A for which methods this pluggable view can handle.
|
||||
methods = None
|
||||
|
||||
#: The canonical way to decorate class-based views is to decorate the
|
||||
#: return value of as_view(). However since this moves parts of the
|
||||
#: logic from the class declaration to the place where it's hooked
|
||||
#: into the routing system.
|
||||
#:
|
||||
#: You can place one or more decorators in this list and whenever the
|
||||
#: view function is created the result is automatically decorated.
|
||||
#:
|
||||
#: .. versionadded:: 0.8
|
||||
decorators = []
|
||||
|
||||
def dispatch_request(self):
|
||||
"""Subclasses have to override this method to implement the
|
||||
actual view function code. This method is called with all
|
||||
the arguments from the URL rule.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def as_view(cls, name, *class_args, **class_kwargs):
|
||||
"""Converts the class into an actual view function that can be used
|
||||
with the routing system. Internally this generates a function on the
|
||||
fly which will instantiate the :class:`View` on each request and call
|
||||
the :meth:`dispatch_request` method on it.
|
||||
|
||||
The arguments passed to :meth:`as_view` are forwarded to the
|
||||
constructor of the class.
|
||||
"""
|
||||
def view(*args, **kwargs):
|
||||
self = view.view_class(*class_args, **class_kwargs)
|
||||
return self.dispatch_request(*args, **kwargs)
|
||||
|
||||
if cls.decorators:
|
||||
view.__name__ = name
|
||||
view.__module__ = cls.__module__
|
||||
for decorator in cls.decorators:
|
||||
view = decorator(view)
|
||||
|
||||
# we attach the view class to the view function for two reasons:
|
||||
# first of all it allows us to easily figure out what class-based
|
||||
# view this thing came from, secondly it's also used for instantiating
|
||||
# the view class so you can actually replace it with something else
|
||||
# for testing purposes and debugging.
|
||||
view.view_class = cls
|
||||
view.__name__ = name
|
||||
view.__doc__ = cls.__doc__
|
||||
view.__module__ = cls.__module__
|
||||
view.methods = cls.methods
|
||||
return view
|
||||
|
||||
|
||||
class MethodViewType(type):
|
||||
|
||||
def __new__(cls, name, bases, d):
|
||||
rv = type.__new__(cls, name, bases, d)
|
||||
if 'methods' not in d:
|
||||
methods = set(rv.methods or [])
|
||||
for key in d:
|
||||
if key in http_method_funcs:
|
||||
methods.add(key.upper())
|
||||
# if we have no method at all in there we don't want to
|
||||
# add a method list. (This is for instance the case for
|
||||
# the baseclass or another subclass of a base method view
|
||||
# that does not introduce new methods).
|
||||
if methods:
|
||||
rv.methods = sorted(methods)
|
||||
return rv
|
||||
|
||||
|
||||
class MethodView(View):
|
||||
"""Like a regular class-based view but that dispatches requests to
|
||||
particular methods. For instance if you implement a method called
|
||||
:meth:`get` it means you will response to ``'GET'`` requests and
|
||||
the :meth:`dispatch_request` implementation will automatically
|
||||
forward your request to that. Also :attr:`options` is set for you
|
||||
automatically::
|
||||
|
||||
class CounterAPI(MethodView):
|
||||
|
||||
def get(self):
|
||||
return session.get('counter', 0)
|
||||
|
||||
def post(self):
|
||||
session['counter'] = session.get('counter', 0) + 1
|
||||
return 'OK'
|
||||
|
||||
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
|
||||
"""
|
||||
__metaclass__ = MethodViewType
|
||||
|
||||
def dispatch_request(self, *args, **kwargs):
|
||||
meth = getattr(self, request.method.lower(), None)
|
||||
# if the request method is HEAD and we don't have a handler for it
|
||||
# retry with GET
|
||||
if meth is None and request.method == 'HEAD':
|
||||
meth = getattr(self, 'get', None)
|
||||
assert meth is not None, 'Unimplemented method %r' % request.method
|
||||
return meth(*args, **kwargs)
|
||||
486
test/fixtures/python/tornado-httpserver.py
vendored
Normal file
486
test/fixtures/python/tornado-httpserver.py
vendored
Normal file
@@ -0,0 +1,486 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2009 Facebook
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""A non-blocking, single-threaded HTTP server.
|
||||
|
||||
Typical applications have little direct interaction with the `HTTPServer`
|
||||
class except to start a server at the beginning of the process
|
||||
(and even that is often done indirectly via `tornado.web.Application.listen`).
|
||||
|
||||
This module also defines the `HTTPRequest` class which is exposed via
|
||||
`tornado.web.RequestHandler.request`.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, with_statement
|
||||
|
||||
import Cookie
|
||||
import logging
|
||||
import socket
|
||||
import time
|
||||
|
||||
from tornado.escape import utf8, native_str, parse_qs_bytes
|
||||
from tornado import httputil
|
||||
from tornado import iostream
|
||||
from tornado.netutil import TCPServer
|
||||
from tornado import stack_context
|
||||
from tornado.util import b, bytes_type
|
||||
|
||||
try:
|
||||
import ssl # Python 2.6+
|
||||
except ImportError:
|
||||
ssl = None
|
||||
|
||||
|
||||
class HTTPServer(TCPServer):
|
||||
r"""A non-blocking, single-threaded HTTP server.
|
||||
|
||||
A server is defined by a request callback that takes an HTTPRequest
|
||||
instance as an argument and writes a valid HTTP response with
|
||||
`HTTPRequest.write`. `HTTPRequest.finish` finishes the request (but does
|
||||
not necessarily close the connection in the case of HTTP/1.1 keep-alive
|
||||
requests). A simple example server that echoes back the URI you
|
||||
requested::
|
||||
|
||||
import httpserver
|
||||
import ioloop
|
||||
|
||||
def handle_request(request):
|
||||
message = "You requested %s\n" % request.uri
|
||||
request.write("HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n%s" % (
|
||||
len(message), message))
|
||||
request.finish()
|
||||
|
||||
http_server = httpserver.HTTPServer(handle_request)
|
||||
http_server.listen(8888)
|
||||
ioloop.IOLoop.instance().start()
|
||||
|
||||
`HTTPServer` is a very basic connection handler. Beyond parsing the
|
||||
HTTP request body and headers, the only HTTP semantics implemented
|
||||
in `HTTPServer` is HTTP/1.1 keep-alive connections. We do not, however,
|
||||
implement chunked encoding, so the request callback must provide a
|
||||
``Content-Length`` header or implement chunked encoding for HTTP/1.1
|
||||
requests for the server to run correctly for HTTP/1.1 clients. If
|
||||
the request handler is unable to do this, you can provide the
|
||||
``no_keep_alive`` argument to the `HTTPServer` constructor, which will
|
||||
ensure the connection is closed on every request no matter what HTTP
|
||||
version the client is using.
|
||||
|
||||
If ``xheaders`` is ``True``, we support the ``X-Real-Ip`` and ``X-Scheme``
|
||||
headers, which override the remote IP and HTTP scheme for all requests.
|
||||
These headers are useful when running Tornado behind a reverse proxy or
|
||||
load balancer.
|
||||
|
||||
`HTTPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
|
||||
To make this server serve SSL traffic, send the ssl_options dictionary
|
||||
argument with the arguments required for the `ssl.wrap_socket` method,
|
||||
including "certfile" and "keyfile"::
|
||||
|
||||
HTTPServer(applicaton, ssl_options={
|
||||
"certfile": os.path.join(data_dir, "mydomain.crt"),
|
||||
"keyfile": os.path.join(data_dir, "mydomain.key"),
|
||||
})
|
||||
|
||||
`HTTPServer` initialization follows one of three patterns (the
|
||||
initialization methods are defined on `tornado.netutil.TCPServer`):
|
||||
|
||||
1. `~tornado.netutil.TCPServer.listen`: simple single-process::
|
||||
|
||||
server = HTTPServer(app)
|
||||
server.listen(8888)
|
||||
IOLoop.instance().start()
|
||||
|
||||
In many cases, `tornado.web.Application.listen` can be used to avoid
|
||||
the need to explicitly create the `HTTPServer`.
|
||||
|
||||
2. `~tornado.netutil.TCPServer.bind`/`~tornado.netutil.TCPServer.start`:
|
||||
simple multi-process::
|
||||
|
||||
server = HTTPServer(app)
|
||||
server.bind(8888)
|
||||
server.start(0) # Forks multiple sub-processes
|
||||
IOLoop.instance().start()
|
||||
|
||||
When using this interface, an `IOLoop` must *not* be passed
|
||||
to the `HTTPServer` constructor. `start` will always start
|
||||
the server on the default singleton `IOLoop`.
|
||||
|
||||
3. `~tornado.netutil.TCPServer.add_sockets`: advanced multi-process::
|
||||
|
||||
sockets = tornado.netutil.bind_sockets(8888)
|
||||
tornado.process.fork_processes(0)
|
||||
server = HTTPServer(app)
|
||||
server.add_sockets(sockets)
|
||||
IOLoop.instance().start()
|
||||
|
||||
The `add_sockets` interface is more complicated, but it can be
|
||||
used with `tornado.process.fork_processes` to give you more
|
||||
flexibility in when the fork happens. `add_sockets` can
|
||||
also be used in single-process servers if you want to create
|
||||
your listening sockets in some way other than
|
||||
`tornado.netutil.bind_sockets`.
|
||||
|
||||
"""
|
||||
def __init__(self, request_callback, no_keep_alive=False, io_loop=None,
|
||||
xheaders=False, ssl_options=None, **kwargs):
|
||||
self.request_callback = request_callback
|
||||
self.no_keep_alive = no_keep_alive
|
||||
self.xheaders = xheaders
|
||||
TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options,
|
||||
**kwargs)
|
||||
|
||||
def handle_stream(self, stream, address):
|
||||
HTTPConnection(stream, address, self.request_callback,
|
||||
self.no_keep_alive, self.xheaders)
|
||||
|
||||
|
||||
class _BadRequestException(Exception):
|
||||
"""Exception class for malformed HTTP requests."""
|
||||
pass
|
||||
|
||||
|
||||
class HTTPConnection(object):
|
||||
"""Handles a connection to an HTTP client, executing HTTP requests.
|
||||
|
||||
We parse HTTP headers and bodies, and execute the request callback
|
||||
until the HTTP conection is closed.
|
||||
"""
|
||||
def __init__(self, stream, address, request_callback, no_keep_alive=False,
|
||||
xheaders=False):
|
||||
self.stream = stream
|
||||
self.address = address
|
||||
self.request_callback = request_callback
|
||||
self.no_keep_alive = no_keep_alive
|
||||
self.xheaders = xheaders
|
||||
self._request = None
|
||||
self._request_finished = False
|
||||
# Save stack context here, outside of any request. This keeps
|
||||
# contexts from one request from leaking into the next.
|
||||
self._header_callback = stack_context.wrap(self._on_headers)
|
||||
self.stream.read_until(b("\r\n\r\n"), self._header_callback)
|
||||
self._write_callback = None
|
||||
|
||||
def write(self, chunk, callback=None):
|
||||
"""Writes a chunk of output to the stream."""
|
||||
assert self._request, "Request closed"
|
||||
if not self.stream.closed():
|
||||
self._write_callback = stack_context.wrap(callback)
|
||||
self.stream.write(chunk, self._on_write_complete)
|
||||
|
||||
def finish(self):
|
||||
"""Finishes the request."""
|
||||
assert self._request, "Request closed"
|
||||
self._request_finished = True
|
||||
if not self.stream.writing():
|
||||
self._finish_request()
|
||||
|
||||
def _on_write_complete(self):
|
||||
if self._write_callback is not None:
|
||||
callback = self._write_callback
|
||||
self._write_callback = None
|
||||
callback()
|
||||
# _on_write_complete is enqueued on the IOLoop whenever the
|
||||
# IOStream's write buffer becomes empty, but it's possible for
|
||||
# another callback that runs on the IOLoop before it to
|
||||
# simultaneously write more data and finish the request. If
|
||||
# there is still data in the IOStream, a future
|
||||
# _on_write_complete will be responsible for calling
|
||||
# _finish_request.
|
||||
if self._request_finished and not self.stream.writing():
|
||||
self._finish_request()
|
||||
|
||||
def _finish_request(self):
|
||||
if self.no_keep_alive:
|
||||
disconnect = True
|
||||
else:
|
||||
connection_header = self._request.headers.get("Connection")
|
||||
if connection_header is not None:
|
||||
connection_header = connection_header.lower()
|
||||
if self._request.supports_http_1_1():
|
||||
disconnect = connection_header == "close"
|
||||
elif ("Content-Length" in self._request.headers
|
||||
or self._request.method in ("HEAD", "GET")):
|
||||
disconnect = connection_header != "keep-alive"
|
||||
else:
|
||||
disconnect = True
|
||||
self._request = None
|
||||
self._request_finished = False
|
||||
if disconnect:
|
||||
self.stream.close()
|
||||
return
|
||||
self.stream.read_until(b("\r\n\r\n"), self._header_callback)
|
||||
|
||||
def _on_headers(self, data):
|
||||
try:
|
||||
data = native_str(data.decode('latin1'))
|
||||
eol = data.find("\r\n")
|
||||
start_line = data[:eol]
|
||||
try:
|
||||
method, uri, version = start_line.split(" ")
|
||||
except ValueError:
|
||||
raise _BadRequestException("Malformed HTTP request line")
|
||||
if not version.startswith("HTTP/"):
|
||||
raise _BadRequestException("Malformed HTTP version in HTTP Request-Line")
|
||||
headers = httputil.HTTPHeaders.parse(data[eol:])
|
||||
|
||||
# HTTPRequest wants an IP, not a full socket address
|
||||
if getattr(self.stream.socket, 'family', socket.AF_INET) in (
|
||||
socket.AF_INET, socket.AF_INET6):
|
||||
# Jython 2.5.2 doesn't have the socket.family attribute,
|
||||
# so just assume IP in that case.
|
||||
remote_ip = self.address[0]
|
||||
else:
|
||||
# Unix (or other) socket; fake the remote address
|
||||
remote_ip = '0.0.0.0'
|
||||
|
||||
self._request = HTTPRequest(
|
||||
connection=self, method=method, uri=uri, version=version,
|
||||
headers=headers, remote_ip=remote_ip)
|
||||
|
||||
content_length = headers.get("Content-Length")
|
||||
if content_length:
|
||||
content_length = int(content_length)
|
||||
if content_length > self.stream.max_buffer_size:
|
||||
raise _BadRequestException("Content-Length too long")
|
||||
if headers.get("Expect") == "100-continue":
|
||||
self.stream.write(b("HTTP/1.1 100 (Continue)\r\n\r\n"))
|
||||
self.stream.read_bytes(content_length, self._on_request_body)
|
||||
return
|
||||
|
||||
self.request_callback(self._request)
|
||||
except _BadRequestException, e:
|
||||
logging.info("Malformed HTTP request from %s: %s",
|
||||
self.address[0], e)
|
||||
self.stream.close()
|
||||
return
|
||||
|
||||
def _on_request_body(self, data):
|
||||
self._request.body = data
|
||||
content_type = self._request.headers.get("Content-Type", "")
|
||||
if self._request.method in ("POST", "PATCH", "PUT"):
|
||||
if content_type.startswith("application/x-www-form-urlencoded"):
|
||||
arguments = parse_qs_bytes(native_str(self._request.body))
|
||||
for name, values in arguments.iteritems():
|
||||
values = [v for v in values if v]
|
||||
if values:
|
||||
self._request.arguments.setdefault(name, []).extend(
|
||||
values)
|
||||
elif content_type.startswith("multipart/form-data"):
|
||||
fields = content_type.split(";")
|
||||
for field in fields:
|
||||
k, sep, v = field.strip().partition("=")
|
||||
if k == "boundary" and v:
|
||||
httputil.parse_multipart_form_data(
|
||||
utf8(v), data,
|
||||
self._request.arguments,
|
||||
self._request.files)
|
||||
break
|
||||
else:
|
||||
logging.warning("Invalid multipart/form-data")
|
||||
self.request_callback(self._request)
|
||||
|
||||
|
||||
class HTTPRequest(object):
|
||||
"""A single HTTP request.
|
||||
|
||||
All attributes are type `str` unless otherwise noted.
|
||||
|
||||
.. attribute:: method
|
||||
|
||||
HTTP request method, e.g. "GET" or "POST"
|
||||
|
||||
.. attribute:: uri
|
||||
|
||||
The requested uri.
|
||||
|
||||
.. attribute:: path
|
||||
|
||||
The path portion of `uri`
|
||||
|
||||
.. attribute:: query
|
||||
|
||||
The query portion of `uri`
|
||||
|
||||
.. attribute:: version
|
||||
|
||||
HTTP version specified in request, e.g. "HTTP/1.1"
|
||||
|
||||
.. attribute:: headers
|
||||
|
||||
`HTTPHeader` dictionary-like object for request headers. Acts like
|
||||
a case-insensitive dictionary with additional methods for repeated
|
||||
headers.
|
||||
|
||||
.. attribute:: body
|
||||
|
||||
Request body, if present, as a byte string.
|
||||
|
||||
.. attribute:: remote_ip
|
||||
|
||||
Client's IP address as a string. If `HTTPServer.xheaders` is set,
|
||||
will pass along the real IP address provided by a load balancer
|
||||
in the ``X-Real-Ip`` header
|
||||
|
||||
.. attribute:: protocol
|
||||
|
||||
The protocol used, either "http" or "https". If `HTTPServer.xheaders`
|
||||
is set, will pass along the protocol used by a load balancer if
|
||||
reported via an ``X-Scheme`` header.
|
||||
|
||||
.. attribute:: host
|
||||
|
||||
The requested hostname, usually taken from the ``Host`` header.
|
||||
|
||||
.. attribute:: arguments
|
||||
|
||||
GET/POST arguments are available in the arguments property, which
|
||||
maps arguments names to lists of values (to support multiple values
|
||||
for individual names). Names are of type `str`, while arguments
|
||||
are byte strings. Note that this is different from
|
||||
`RequestHandler.get_argument`, which returns argument values as
|
||||
unicode strings.
|
||||
|
||||
.. attribute:: files
|
||||
|
||||
File uploads are available in the files property, which maps file
|
||||
names to lists of :class:`HTTPFile`.
|
||||
|
||||
.. attribute:: connection
|
||||
|
||||
An HTTP request is attached to a single HTTP connection, which can
|
||||
be accessed through the "connection" attribute. Since connections
|
||||
are typically kept open in HTTP/1.1, multiple requests can be handled
|
||||
sequentially on a single connection.
|
||||
"""
|
||||
def __init__(self, method, uri, version="HTTP/1.0", headers=None,
|
||||
body=None, remote_ip=None, protocol=None, host=None,
|
||||
files=None, connection=None):
|
||||
self.method = method
|
||||
self.uri = uri
|
||||
self.version = version
|
||||
self.headers = headers or httputil.HTTPHeaders()
|
||||
self.body = body or ""
|
||||
if connection and connection.xheaders:
|
||||
# Squid uses X-Forwarded-For, others use X-Real-Ip
|
||||
self.remote_ip = self.headers.get(
|
||||
"X-Real-Ip", self.headers.get("X-Forwarded-For", remote_ip))
|
||||
if not self._valid_ip(self.remote_ip):
|
||||
self.remote_ip = remote_ip
|
||||
# AWS uses X-Forwarded-Proto
|
||||
self.protocol = self.headers.get(
|
||||
"X-Scheme", self.headers.get("X-Forwarded-Proto", protocol))
|
||||
if self.protocol not in ("http", "https"):
|
||||
self.protocol = "http"
|
||||
else:
|
||||
self.remote_ip = remote_ip
|
||||
if protocol:
|
||||
self.protocol = protocol
|
||||
elif connection and isinstance(connection.stream,
|
||||
iostream.SSLIOStream):
|
||||
self.protocol = "https"
|
||||
else:
|
||||
self.protocol = "http"
|
||||
self.host = host or self.headers.get("Host") or "127.0.0.1"
|
||||
self.files = files or {}
|
||||
self.connection = connection
|
||||
self._start_time = time.time()
|
||||
self._finish_time = None
|
||||
|
||||
self.path, sep, self.query = uri.partition('?')
|
||||
arguments = parse_qs_bytes(self.query)
|
||||
self.arguments = {}
|
||||
for name, values in arguments.iteritems():
|
||||
values = [v for v in values if v]
|
||||
if values:
|
||||
self.arguments[name] = values
|
||||
|
||||
def supports_http_1_1(self):
|
||||
"""Returns True if this request supports HTTP/1.1 semantics"""
|
||||
return self.version == "HTTP/1.1"
|
||||
|
||||
@property
|
||||
def cookies(self):
|
||||
"""A dictionary of Cookie.Morsel objects."""
|
||||
if not hasattr(self, "_cookies"):
|
||||
self._cookies = Cookie.SimpleCookie()
|
||||
if "Cookie" in self.headers:
|
||||
try:
|
||||
self._cookies.load(
|
||||
native_str(self.headers["Cookie"]))
|
||||
except Exception:
|
||||
self._cookies = {}
|
||||
return self._cookies
|
||||
|
||||
def write(self, chunk, callback=None):
|
||||
"""Writes the given chunk to the response stream."""
|
||||
assert isinstance(chunk, bytes_type)
|
||||
self.connection.write(chunk, callback=callback)
|
||||
|
||||
def finish(self):
|
||||
"""Finishes this HTTP request on the open connection."""
|
||||
self.connection.finish()
|
||||
self._finish_time = time.time()
|
||||
|
||||
def full_url(self):
|
||||
"""Reconstructs the full URL for this request."""
|
||||
return self.protocol + "://" + self.host + self.uri
|
||||
|
||||
def request_time(self):
|
||||
"""Returns the amount of time it took for this request to execute."""
|
||||
if self._finish_time is None:
|
||||
return time.time() - self._start_time
|
||||
else:
|
||||
return self._finish_time - self._start_time
|
||||
|
||||
def get_ssl_certificate(self):
|
||||
"""Returns the client's SSL certificate, if any.
|
||||
|
||||
To use client certificates, the HTTPServer must have been constructed
|
||||
with cert_reqs set in ssl_options, e.g.::
|
||||
|
||||
server = HTTPServer(app,
|
||||
ssl_options=dict(
|
||||
certfile="foo.crt",
|
||||
keyfile="foo.key",
|
||||
cert_reqs=ssl.CERT_REQUIRED,
|
||||
ca_certs="cacert.crt"))
|
||||
|
||||
The return value is a dictionary, see SSLSocket.getpeercert() in
|
||||
the standard library for more details.
|
||||
http://docs.python.org/library/ssl.html#sslsocket-objects
|
||||
"""
|
||||
try:
|
||||
return self.connection.stream.socket.getpeercert()
|
||||
except ssl.SSLError:
|
||||
return None
|
||||
|
||||
def __repr__(self):
|
||||
attrs = ("protocol", "host", "method", "uri", "version", "remote_ip",
|
||||
"body")
|
||||
args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs])
|
||||
return "%s(%s, headers=%s)" % (
|
||||
self.__class__.__name__, args, dict(self.headers))
|
||||
|
||||
def _valid_ip(self, ip):
|
||||
try:
|
||||
res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC,
|
||||
socket.SOCK_STREAM,
|
||||
0, socket.AI_NUMERICHOST)
|
||||
return bool(res)
|
||||
except socket.gaierror, e:
|
||||
if e.args[0] == socket.EAI_NONAME:
|
||||
return False
|
||||
raise
|
||||
return True
|
||||
703
test/fixtures/ruby/formula.rb
vendored
Normal file
703
test/fixtures/ruby/formula.rb
vendored
Normal file
@@ -0,0 +1,703 @@
|
||||
require 'download_strategy'
|
||||
require 'dependencies'
|
||||
require 'formula_support'
|
||||
require 'hardware'
|
||||
require 'bottles'
|
||||
require 'extend/fileutils'
|
||||
require 'patches'
|
||||
require 'compilers'
|
||||
|
||||
# Derive and define at least @url, see Library/Formula for examples
|
||||
class Formula
|
||||
include FileUtils
|
||||
|
||||
attr_reader :name, :path, :url, :version, :homepage, :specs, :downloader
|
||||
attr_reader :standard, :unstable, :head
|
||||
attr_reader :bottle_version, :bottle_url, :bottle_sha1
|
||||
|
||||
# The build folder, usually in /tmp.
|
||||
# Will only be non-nil during the stage method.
|
||||
attr_reader :buildpath
|
||||
|
||||
# Homebrew determines the name
|
||||
def initialize name='__UNKNOWN__', path=nil
|
||||
set_instance_variable 'homepage'
|
||||
set_instance_variable 'url'
|
||||
set_instance_variable 'bottle_version'
|
||||
set_instance_variable 'bottle_url'
|
||||
set_instance_variable 'bottle_sha1'
|
||||
set_instance_variable 'head'
|
||||
set_instance_variable 'specs'
|
||||
set_instance_variable 'standard'
|
||||
set_instance_variable 'unstable'
|
||||
|
||||
if @head and (not @url or ARGV.build_head?)
|
||||
@url = @head
|
||||
@version = 'HEAD'
|
||||
@spec_to_use = @unstable
|
||||
else
|
||||
if @standard.nil?
|
||||
@spec_to_use = SoftwareSpecification.new(@url, @specs)
|
||||
else
|
||||
@spec_to_use = @standard
|
||||
end
|
||||
end
|
||||
|
||||
raise "No url provided for formula #{name}" if @url.nil?
|
||||
@name = name
|
||||
validate_variable :name
|
||||
|
||||
# If we got an explicit path, use that, else determine from the name
|
||||
@path = path.nil? ? self.class.path(name) : Pathname.new(path)
|
||||
|
||||
# Use a provided version, if any
|
||||
set_instance_variable 'version'
|
||||
# Otherwise detect the version from the URL
|
||||
@version ||= @spec_to_use.detect_version
|
||||
# Only validate if a version was set; GitHubGistFormula needs to get
|
||||
# the URL to determine the version
|
||||
validate_variable :version if @version
|
||||
|
||||
CHECKSUM_TYPES.each { |type| set_instance_variable type }
|
||||
|
||||
@downloader = download_strategy.new @spec_to_use.url, name, version, @spec_to_use.specs
|
||||
|
||||
@bottle_url ||= bottle_base_url + bottle_filename(self) if @bottle_sha1
|
||||
end
|
||||
|
||||
# if the dir is there, but it's empty we consider it not installed
|
||||
def installed?
|
||||
return installed_prefix.children.length > 0
|
||||
rescue
|
||||
return false
|
||||
end
|
||||
|
||||
def explicitly_requested?
|
||||
# `ARGV.formulae` will throw an exception if it comes up with an empty list.
|
||||
# FIXME: `ARGV.formulae` shouldn't be throwing exceptions, see issue #8823
|
||||
return false if ARGV.named.empty?
|
||||
ARGV.formulae.include? self
|
||||
end
|
||||
|
||||
def linked_keg
|
||||
HOMEBREW_REPOSITORY/'Library/LinkedKegs'/@name
|
||||
end
|
||||
|
||||
def installed_prefix
|
||||
head_prefix = HOMEBREW_CELLAR+@name+'HEAD'
|
||||
if @version == 'HEAD' || head_prefix.directory?
|
||||
head_prefix
|
||||
else
|
||||
prefix
|
||||
end
|
||||
end
|
||||
|
||||
def prefix
|
||||
validate_variable :name
|
||||
validate_variable :version
|
||||
HOMEBREW_CELLAR+@name+@version
|
||||
end
|
||||
def rack; prefix.parent end
|
||||
|
||||
def bin; prefix+'bin' end
|
||||
def doc; prefix+'share/doc'+name end
|
||||
def include; prefix+'include' end
|
||||
def info; prefix+'share/info' end
|
||||
def lib; prefix+'lib' end
|
||||
def libexec; prefix+'libexec' end
|
||||
def man; prefix+'share/man' end
|
||||
def man1; man+'man1' end
|
||||
def man2; man+'man2' end
|
||||
def man3; man+'man3' end
|
||||
def man4; man+'man4' end
|
||||
def man5; man+'man5' end
|
||||
def man6; man+'man6' end
|
||||
def man7; man+'man7' end
|
||||
def man8; man+'man8' end
|
||||
def sbin; prefix+'sbin' end
|
||||
def share; prefix+'share' end
|
||||
|
||||
# configuration needs to be preserved past upgrades
|
||||
def etc; HOMEBREW_PREFIX+'etc' end
|
||||
# generally we don't want var stuff inside the keg
|
||||
def var; HOMEBREW_PREFIX+'var' end
|
||||
|
||||
# plist name, i.e. the name of the launchd service
|
||||
def plist_name; 'homebrew.mxcl.'+name end
|
||||
def plist_path; prefix+(plist_name+'.plist') end
|
||||
|
||||
# Use the @spec_to_use to detect the download strategy.
|
||||
# Can be overriden to force a custom download strategy
|
||||
def download_strategy
|
||||
@spec_to_use.download_strategy
|
||||
end
|
||||
|
||||
def cached_download
|
||||
@downloader.cached_location
|
||||
end
|
||||
|
||||
# tell the user about any caveats regarding this package, return a string
|
||||
def caveats; nil end
|
||||
|
||||
# any e.g. configure options for this package
|
||||
def options; [] end
|
||||
|
||||
# patches are automatically applied after extracting the tarball
|
||||
# return an array of strings, or if you need a patch level other than -p1
|
||||
# return a Hash eg.
|
||||
# {
|
||||
# :p0 => ['http://foo.com/patch1', 'http://foo.com/patch2'],
|
||||
# :p1 => 'http://bar.com/patch2',
|
||||
# :p2 => ['http://moo.com/patch5', 'http://moo.com/patch6']
|
||||
# }
|
||||
# The final option is to return DATA, then put a diff after __END__. You
|
||||
# can still return a Hash with DATA as the value for a patch level key.
|
||||
def patches; end
|
||||
|
||||
# rarely, you don't want your library symlinked into the main prefix
|
||||
# see gettext.rb for an example
|
||||
def keg_only?
|
||||
self.class.keg_only_reason || false
|
||||
end
|
||||
|
||||
def fails_with? cc
|
||||
return false if self.class.cc_failures.nil?
|
||||
cc = Compiler.new(cc) unless cc.is_a? Compiler
|
||||
return self.class.cc_failures.find do |failure|
|
||||
next unless failure.compiler == cc.name
|
||||
failure.build.zero? or failure.build >= cc.build
|
||||
end
|
||||
end
|
||||
|
||||
# sometimes the clean process breaks things
|
||||
# skip cleaning paths in a formula with a class method like this:
|
||||
# skip_clean [bin+"foo", lib+"bar"]
|
||||
# redefining skip_clean? now deprecated
|
||||
def skip_clean? path
|
||||
return true if self.class.skip_clean_all?
|
||||
to_check = path.relative_path_from(prefix).to_s
|
||||
self.class.skip_clean_paths.include? to_check
|
||||
end
|
||||
|
||||
# yields self with current working directory set to the uncompressed tarball
|
||||
def brew
|
||||
validate_variable :name
|
||||
validate_variable :version
|
||||
|
||||
stage do
|
||||
begin
|
||||
patch
|
||||
# we allow formulas to do anything they want to the Ruby process
|
||||
# so load any deps before this point! And exit asap afterwards
|
||||
yield self
|
||||
rescue Interrupt, RuntimeError, SystemCallError => e
|
||||
puts if Interrupt === e # don't print next to the ^C
|
||||
unless ARGV.debug?
|
||||
%w(config.log CMakeCache.txt).select{|f| File.exist? f}.each do |f|
|
||||
HOMEBREW_LOGS.install f
|
||||
puts "#{f} was copied to #{HOMEBREW_LOGS}"
|
||||
end
|
||||
raise
|
||||
end
|
||||
onoe e.inspect
|
||||
puts e.backtrace
|
||||
|
||||
ohai "Rescuing build..."
|
||||
if (e.was_running_configure? rescue false) and File.exist? 'config.log'
|
||||
puts "It looks like an autotools configure failed."
|
||||
puts "Gist 'config.log' and any error output when reporting an issue."
|
||||
puts
|
||||
end
|
||||
|
||||
puts "When you exit this shell Homebrew will attempt to finalise the installation."
|
||||
puts "If nothing is installed or the shell exits with a non-zero error code,"
|
||||
puts "Homebrew will abort. The installation prefix is:"
|
||||
puts prefix
|
||||
interactive_shell self
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def == b
|
||||
name == b.name
|
||||
end
|
||||
def eql? b
|
||||
self == b and self.class.equal? b.class
|
||||
end
|
||||
def hash
|
||||
name.hash
|
||||
end
|
||||
def <=> b
|
||||
name <=> b.name
|
||||
end
|
||||
def to_s
|
||||
name
|
||||
end
|
||||
|
||||
# Standard parameters for CMake builds.
|
||||
# Using Build Type "None" tells cmake to use our CFLAGS,etc. settings.
|
||||
# Setting it to Release would ignore our flags.
|
||||
# Setting CMAKE_FIND_FRAMEWORK to "LAST" tells CMake to search for our
|
||||
# libraries before trying to utilize Frameworks, many of which will be from
|
||||
# 3rd party installs.
|
||||
# Note: there isn't a std_autotools variant because autotools is a lot
|
||||
# less consistent and the standard parameters are more memorable.
|
||||
def std_cmake_args
|
||||
%W[
|
||||
-DCMAKE_INSTALL_PREFIX=#{prefix}
|
||||
-DCMAKE_BUILD_TYPE=None
|
||||
-DCMAKE_FIND_FRAMEWORK=LAST
|
||||
-Wno-dev
|
||||
]
|
||||
end
|
||||
|
||||
def self.class_s name
|
||||
#remove invalid characters and then camelcase it
|
||||
name.capitalize.gsub(/[-_.\s]([a-zA-Z0-9])/) { $1.upcase } \
|
||||
.gsub('+', 'x')
|
||||
end
|
||||
|
||||
# an array of all Formula names
|
||||
def self.names
|
||||
Dir["#{HOMEBREW_REPOSITORY}/Library/Formula/*.rb"].map{ |f| File.basename f, '.rb' }.sort
|
||||
end
|
||||
|
||||
# an array of all Formula, instantiated
|
||||
def self.all
|
||||
map{ |f| f }
|
||||
end
|
||||
def self.map
|
||||
rv = []
|
||||
each{ |f| rv << yield(f) }
|
||||
rv
|
||||
end
|
||||
def self.each
|
||||
names.each do |n|
|
||||
begin
|
||||
yield Formula.factory(n)
|
||||
rescue
|
||||
# Don't let one broken formula break commands. But do complain.
|
||||
onoe "Formula #{n} will not import."
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def inspect
|
||||
name
|
||||
end
|
||||
|
||||
def self.aliases
|
||||
Dir["#{HOMEBREW_REPOSITORY}/Library/Aliases/*"].map{ |f| File.basename f }.sort
|
||||
end
|
||||
|
||||
def self.canonical_name name
|
||||
name = name.to_s if name.kind_of? Pathname
|
||||
|
||||
formula_with_that_name = HOMEBREW_REPOSITORY+"Library/Formula/#{name}.rb"
|
||||
possible_alias = HOMEBREW_REPOSITORY+"Library/Aliases/#{name}"
|
||||
possible_cached_formula = HOMEBREW_CACHE_FORMULA+"#{name}.rb"
|
||||
|
||||
if name.include? "/"
|
||||
if name =~ %r{(.+)/(.+)/(.+)}
|
||||
tapd = HOMEBREW_REPOSITORY/"Library/Taps"/"#$1-#$2".downcase
|
||||
tapd.find_formula do |relative_pathname|
|
||||
return "#{tapd}/#{relative_pathname}" if relative_pathname.stem.to_s == $3
|
||||
end if tapd.directory?
|
||||
end
|
||||
# Otherwise don't resolve paths or URLs
|
||||
name
|
||||
elsif formula_with_that_name.file? and formula_with_that_name.readable?
|
||||
name
|
||||
elsif possible_alias.file?
|
||||
possible_alias.realpath.basename('.rb').to_s
|
||||
elsif possible_cached_formula.file?
|
||||
possible_cached_formula.to_s
|
||||
else
|
||||
name
|
||||
end
|
||||
end
|
||||
|
||||
def self.factory name
|
||||
# If an instance of Formula is passed, just return it
|
||||
return name if name.kind_of? Formula
|
||||
|
||||
# Otherwise, convert to String in case a Pathname comes in
|
||||
name = name.to_s
|
||||
|
||||
# If a URL is passed, download to the cache and install
|
||||
if name =~ %r[(https?|ftp)://]
|
||||
url = name
|
||||
name = Pathname.new(name).basename
|
||||
target_file = HOMEBREW_CACHE_FORMULA+name
|
||||
name = name.basename(".rb").to_s
|
||||
|
||||
HOMEBREW_CACHE_FORMULA.mkpath
|
||||
FileUtils.rm target_file, :force => true
|
||||
curl url, '-o', target_file
|
||||
|
||||
require target_file
|
||||
install_type = :from_url
|
||||
else
|
||||
name = Formula.canonical_name(name)
|
||||
# If name was a path or mapped to a cached formula
|
||||
if name.include? "/"
|
||||
require name
|
||||
|
||||
# require allows filenames to drop the .rb extension, but everything else
|
||||
# in our codebase will require an exact and fullpath.
|
||||
name = "#{name}.rb" unless name =~ /\.rb$/
|
||||
|
||||
path = Pathname.new(name)
|
||||
name = path.stem
|
||||
install_type = :from_path
|
||||
target_file = path.to_s
|
||||
else
|
||||
# For names, map to the path and then require
|
||||
require Formula.path(name)
|
||||
install_type = :from_name
|
||||
end
|
||||
end
|
||||
|
||||
begin
|
||||
klass_name = self.class_s(name)
|
||||
klass = Object.const_get klass_name
|
||||
rescue NameError
|
||||
# TODO really this text should be encoded into the exception
|
||||
# and only shown if the UI deems it correct to show it
|
||||
onoe "class \"#{klass_name}\" expected but not found in #{name}.rb"
|
||||
puts "Double-check the name of the class in that formula."
|
||||
raise LoadError
|
||||
end
|
||||
|
||||
return klass.new(name) if install_type == :from_name
|
||||
return klass.new(name, target_file)
|
||||
rescue LoadError
|
||||
raise FormulaUnavailableError.new(name)
|
||||
end
|
||||
|
||||
def tap
|
||||
if path.realpath.to_s =~ %r{#{HOMEBREW_REPOSITORY}/Library/Taps/(\w+)-(\w+)}
|
||||
"#$1/#$2"
|
||||
else
|
||||
# remotely installed formula are not mxcl/master but this will do for now
|
||||
"mxcl/master"
|
||||
end
|
||||
end
|
||||
|
||||
def self.path name
|
||||
HOMEBREW_REPOSITORY+"Library/Formula/#{name.downcase}.rb"
|
||||
end
|
||||
|
||||
def mirrors; self.class.mirrors or []; end
|
||||
|
||||
def deps; self.class.dependencies.deps; end
|
||||
def external_deps; self.class.dependencies.external_deps; end
|
||||
|
||||
# deps are in an installable order
|
||||
# which means if a depends on b then b will be ordered before a in this list
|
||||
def recursive_deps
|
||||
Formula.expand_deps(self).flatten.uniq
|
||||
end
|
||||
|
||||
def self.expand_deps f
|
||||
f.deps.map do |dep|
|
||||
f_dep = Formula.factory dep.to_s
|
||||
expand_deps(f_dep) << f_dep
|
||||
end
|
||||
end
|
||||
|
||||
protected
|
||||
|
||||
# Pretty titles the command and buffers stdout/stderr
|
||||
# Throws if there's an error
|
||||
def system cmd, *args
|
||||
# remove "boring" arguments so that the important ones are more likely to
|
||||
# be shown considering that we trim long ohai lines to the terminal width
|
||||
pretty_args = args.dup
|
||||
pretty_args.delete "--disable-dependency-tracking" if cmd == "./configure" and not ARGV.verbose?
|
||||
ohai "#{cmd} #{pretty_args*' '}".strip
|
||||
|
||||
removed_ENV_variables = case if args.empty? then cmd.split(' ').first else cmd end
|
||||
when "xcodebuild"
|
||||
ENV.remove_cc_etc
|
||||
end
|
||||
|
||||
if ARGV.verbose?
|
||||
safe_system cmd, *args
|
||||
else
|
||||
rd, wr = IO.pipe
|
||||
pid = fork do
|
||||
rd.close
|
||||
$stdout.reopen wr
|
||||
$stderr.reopen wr
|
||||
args.collect!{|arg| arg.to_s}
|
||||
exec(cmd, *args) rescue nil
|
||||
exit! 1 # never gets here unless exec threw or failed
|
||||
end
|
||||
wr.close
|
||||
out = ''
|
||||
out << rd.read until rd.eof?
|
||||
Process.wait
|
||||
unless $?.success?
|
||||
puts out
|
||||
raise
|
||||
end
|
||||
end
|
||||
|
||||
removed_ENV_variables.each do |key, value|
|
||||
ENV[key] = value # ENV.kind_of? Hash # => false
|
||||
end if removed_ENV_variables
|
||||
|
||||
rescue
|
||||
raise BuildError.new(self, cmd, args, $?)
|
||||
end
|
||||
|
||||
public
|
||||
|
||||
# For brew-fetch and others.
|
||||
def fetch
|
||||
if install_bottle? self
|
||||
downloader = CurlBottleDownloadStrategy.new bottle_url, name, version, nil
|
||||
mirror_list = []
|
||||
else
|
||||
downloader = @downloader
|
||||
# Don't attempt mirrors if this install is not pointed at a "stable" URL.
|
||||
# This can happen when options like `--HEAD` are invoked.
|
||||
mirror_list = @spec_to_use == @standard ? mirrors : []
|
||||
end
|
||||
|
||||
# Ensure the cache exists
|
||||
HOMEBREW_CACHE.mkpath
|
||||
|
||||
begin
|
||||
fetched = downloader.fetch
|
||||
rescue CurlDownloadStrategyError => e
|
||||
raise e if mirror_list.empty?
|
||||
puts "Trying a mirror..."
|
||||
url, specs = mirror_list.shift.values_at :url, :specs
|
||||
downloader = download_strategy.new url, name, version, specs
|
||||
retry
|
||||
end
|
||||
|
||||
return fetched, downloader
|
||||
end
|
||||
|
||||
# Detect which type of checksum is being used, or nil if none
|
||||
def checksum_type
|
||||
CHECKSUM_TYPES.detect { |type| instance_variable_defined?("@#{type}") }
|
||||
end
|
||||
|
||||
# For FormulaInstaller.
|
||||
def verify_download_integrity fn, *args
|
||||
require 'digest'
|
||||
if args.length != 2
|
||||
type = checksum_type || :md5
|
||||
supplied = instance_variable_get("@#{type}")
|
||||
# Convert symbol to readable string
|
||||
type = type.to_s.upcase
|
||||
else
|
||||
supplied, type = args
|
||||
end
|
||||
|
||||
hasher = Digest.const_get(type)
|
||||
hash = fn.incremental_hash(hasher)
|
||||
|
||||
if supplied and not supplied.empty?
|
||||
message = <<-EOF
|
||||
#{type} mismatch
|
||||
Expected: #{supplied}
|
||||
Got: #{hash}
|
||||
Archive: #{fn}
|
||||
(To retry an incomplete download, remove the file above.)
|
||||
EOF
|
||||
raise message unless supplied.upcase == hash.upcase
|
||||
else
|
||||
opoo "Cannot verify package integrity"
|
||||
puts "The formula did not provide a download checksum"
|
||||
puts "For your reference the #{type} is: #{hash}"
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
CHECKSUM_TYPES=[:md5, :sha1, :sha256].freeze
|
||||
|
||||
def stage
|
||||
fetched, downloader = fetch
|
||||
verify_download_integrity fetched if fetched.kind_of? Pathname
|
||||
mktemp do
|
||||
downloader.stage
|
||||
# Set path after the downloader changes the working folder.
|
||||
@buildpath = Pathname.pwd
|
||||
yield
|
||||
@buildpath = nil
|
||||
end
|
||||
end
|
||||
|
||||
def patch
|
||||
patch_list = Patches.new(patches)
|
||||
return if patch_list.empty?
|
||||
|
||||
if patch_list.external_patches?
|
||||
ohai "Downloading patches"
|
||||
patch_list.download!
|
||||
end
|
||||
|
||||
ohai "Patching"
|
||||
patch_list.each do |p|
|
||||
case p.compression
|
||||
when :gzip then safe_system "/usr/bin/gunzip", p.compressed_filename
|
||||
when :bzip2 then safe_system "/usr/bin/bunzip2", p.compressed_filename
|
||||
end
|
||||
# -f means don't prompt the user if there are errors; just exit with non-zero status
|
||||
safe_system '/usr/bin/patch', '-f', *(p.patch_args)
|
||||
end
|
||||
end
|
||||
|
||||
def validate_variable name
|
||||
v = instance_variable_get("@#{name}")
|
||||
raise "Invalid @#{name}" if v.to_s.empty? or v =~ /\s/
|
||||
end
|
||||
|
||||
def set_instance_variable(type)
|
||||
return if instance_variable_defined? "@#{type}"
|
||||
class_value = self.class.send(type)
|
||||
instance_variable_set("@#{type}", class_value) if class_value
|
||||
end
|
||||
|
||||
def self.method_added method
|
||||
raise 'You cannot override Formula.brew' if method == :brew
|
||||
end
|
||||
|
||||
class << self
|
||||
# The methods below define the formula DSL.
|
||||
attr_reader :standard, :unstable
|
||||
|
||||
def self.attr_rw(*attrs)
|
||||
attrs.each do |attr|
|
||||
class_eval %Q{
|
||||
def #{attr}(val=nil)
|
||||
val.nil? ? @#{attr} : @#{attr} = val
|
||||
end
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
attr_rw :version, :homepage, :mirrors, :specs
|
||||
attr_rw :keg_only_reason, :skip_clean_all, :cc_failures
|
||||
attr_rw :bottle_version, :bottle_url, :bottle_sha1
|
||||
attr_rw(*CHECKSUM_TYPES)
|
||||
|
||||
def head val=nil, specs=nil
|
||||
return @head if val.nil?
|
||||
@unstable = SoftwareSpecification.new(val, specs)
|
||||
@head = val
|
||||
@specs = specs
|
||||
end
|
||||
|
||||
def url val=nil, specs=nil
|
||||
return @url if val.nil?
|
||||
@standard = SoftwareSpecification.new(val, specs)
|
||||
@url = val
|
||||
@specs = specs
|
||||
end
|
||||
|
||||
def stable &block
|
||||
raise "url and md5 must be specified in a block" unless block_given?
|
||||
instance_eval(&block) unless ARGV.build_devel? or ARGV.build_head?
|
||||
end
|
||||
|
||||
def devel &block
|
||||
raise "url and md5 must be specified in a block" unless block_given?
|
||||
if ARGV.build_devel?
|
||||
@mirrors = nil # clear out mirrors from the stable release
|
||||
instance_eval(&block)
|
||||
end
|
||||
end
|
||||
|
||||
def bottle url=nil, &block
|
||||
return unless block_given?
|
||||
|
||||
bottle_block = Class.new do
|
||||
def self.version version
|
||||
@version = version
|
||||
end
|
||||
|
||||
def self.url url
|
||||
@url = url
|
||||
end
|
||||
|
||||
def self.sha1 sha1
|
||||
case sha1
|
||||
when Hash
|
||||
key, value = sha1.shift
|
||||
@sha1 = key if value == MacOS.cat
|
||||
when String
|
||||
@sha1 = sha1 if MacOS.lion?
|
||||
end
|
||||
end
|
||||
|
||||
def self.data
|
||||
@version = 0 unless @version
|
||||
return @version, @url, @sha1 if @sha1 && @url
|
||||
return @version, nil, @sha1 if @sha1
|
||||
end
|
||||
end
|
||||
|
||||
bottle_block.instance_eval(&block)
|
||||
@bottle_version, @bottle_url, @bottle_sha1 = bottle_block.data
|
||||
end
|
||||
|
||||
def mirror val, specs=nil
|
||||
@mirrors ||= []
|
||||
@mirrors << {:url => val, :specs => specs}
|
||||
# Added the uniq after some inspection with Pry---seems `mirror` gets
|
||||
# called three times. The first two times only one copy of the input is
|
||||
# left in `@mirrors`. On the final call, two copies are present. This
|
||||
# happens with `@deps` as well. Odd.
|
||||
@mirrors.uniq!
|
||||
end
|
||||
|
||||
def dependencies
|
||||
@dependencies ||= DependencyCollector.new
|
||||
end
|
||||
|
||||
def depends_on dep
|
||||
dependencies.add(dep)
|
||||
end
|
||||
|
||||
def skip_clean paths
|
||||
if paths == :all
|
||||
@skip_clean_all = true
|
||||
return
|
||||
end
|
||||
@skip_clean_paths ||= []
|
||||
[paths].flatten.each do |p|
|
||||
@skip_clean_paths << p.to_s unless @skip_clean_paths.include? p.to_s
|
||||
end
|
||||
end
|
||||
|
||||
def skip_clean_all?
|
||||
@skip_clean_all
|
||||
end
|
||||
|
||||
def skip_clean_paths
|
||||
@skip_clean_paths or []
|
||||
end
|
||||
|
||||
def keg_only reason, explanation=nil
|
||||
@keg_only_reason = KegOnlyReason.new(reason, explanation.to_s.chomp)
|
||||
end
|
||||
|
||||
def fails_with compiler, &block
|
||||
@cc_failures ||= CompilerFailures.new
|
||||
@cc_failures << if block_given?
|
||||
CompilerFailure.new(compiler, &block)
|
||||
else
|
||||
CompilerFailure.new(compiler)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
require 'formula_specialties'
|
||||
323
test/fixtures/ruby/inflector.rb
vendored
Normal file
323
test/fixtures/ruby/inflector.rb
vendored
Normal file
@@ -0,0 +1,323 @@
|
||||
# encoding: utf-8
|
||||
|
||||
require 'active_support/inflector/inflections'
|
||||
|
||||
module ActiveSupport
|
||||
# The Inflector transforms words from singular to plural, class names to table names, modularized class names to ones without,
|
||||
# and class names to foreign keys. The default inflections for pluralization, singularization, and uncountable words are kept
|
||||
# in inflections.rb.
|
||||
#
|
||||
# The Rails core team has stated patches for the inflections library will not be accepted
|
||||
# in order to avoid breaking legacy applications which may be relying on errant inflections.
|
||||
# If you discover an incorrect inflection and require it for your application, you'll need
|
||||
# to correct it yourself (explained below).
|
||||
module Inflector
|
||||
extend self
|
||||
|
||||
# Returns the plural form of the word in the string.
|
||||
#
|
||||
# "post".pluralize # => "posts"
|
||||
# "octopus".pluralize # => "octopi"
|
||||
# "sheep".pluralize # => "sheep"
|
||||
# "words".pluralize # => "words"
|
||||
# "CamelOctopus".pluralize # => "CamelOctopi"
|
||||
def pluralize(word)
|
||||
apply_inflections(word, inflections.plurals)
|
||||
end
|
||||
|
||||
# The reverse of +pluralize+, returns the singular form of a word in a string.
|
||||
#
|
||||
# "posts".singularize # => "post"
|
||||
# "octopi".singularize # => "octopus"
|
||||
# "sheep".singularize # => "sheep"
|
||||
# "word".singularize # => "word"
|
||||
# "CamelOctopi".singularize # => "CamelOctopus"
|
||||
def singularize(word)
|
||||
apply_inflections(word, inflections.singulars)
|
||||
end
|
||||
|
||||
# By default, +camelize+ converts strings to UpperCamelCase. If the argument to +camelize+
|
||||
# is set to <tt>:lower</tt> then +camelize+ produces lowerCamelCase.
|
||||
#
|
||||
# +camelize+ will also convert '/' to '::' which is useful for converting paths to namespaces.
|
||||
#
|
||||
# "active_model".camelize # => "ActiveModel"
|
||||
# "active_model".camelize(:lower) # => "activeModel"
|
||||
# "active_model/errors".camelize # => "ActiveModel::Errors"
|
||||
# "active_model/errors".camelize(:lower) # => "activeModel::Errors"
|
||||
#
|
||||
# As a rule of thumb you can think of +camelize+ as the inverse of +underscore+,
|
||||
# though there are cases where that does not hold:
|
||||
#
|
||||
# "SSLError".underscore.camelize # => "SslError"
|
||||
def camelize(term, uppercase_first_letter = true)
|
||||
string = term.to_s
|
||||
if uppercase_first_letter
|
||||
string = string.sub(/^[a-z\d]*/) { inflections.acronyms[$&] || $&.capitalize }
|
||||
else
|
||||
string = string.sub(/^(?:#{inflections.acronym_regex}(?=\b|[A-Z_])|\w)/) { $&.downcase }
|
||||
end
|
||||
string.gsub(/(?:_|(\/))([a-z\d]*)/i) { "#{$1}#{inflections.acronyms[$2] || $2.capitalize}" }.gsub('/', '::')
|
||||
end
|
||||
|
||||
# Makes an underscored, lowercase form from the expression in the string.
|
||||
#
|
||||
# Changes '::' to '/' to convert namespaces to paths.
|
||||
#
|
||||
# "ActiveModel".underscore # => "active_model"
|
||||
# "ActiveModel::Errors".underscore # => "active_model/errors"
|
||||
#
|
||||
# As a rule of thumb you can think of +underscore+ as the inverse of +camelize+,
|
||||
# though there are cases where that does not hold:
|
||||
#
|
||||
# "SSLError".underscore.camelize # => "SslError"
|
||||
def underscore(camel_cased_word)
|
||||
word = camel_cased_word.to_s.dup
|
||||
word.gsub!('::', '/')
|
||||
word.gsub!(/(?:([A-Za-z\d])|^)(#{inflections.acronym_regex})(?=\b|[^a-z])/) { "#{$1}#{$1 && '_'}#{$2.downcase}" }
|
||||
word.gsub!(/([A-Z\d]+)([A-Z][a-z])/,'\1_\2')
|
||||
word.gsub!(/([a-z\d])([A-Z])/,'\1_\2')
|
||||
word.tr!("-", "_")
|
||||
word.downcase!
|
||||
word
|
||||
end
|
||||
|
||||
# Capitalizes the first word and turns underscores into spaces and strips a
|
||||
# trailing "_id", if any. Like +titleize+, this is meant for creating pretty output.
|
||||
#
|
||||
# "employee_salary" # => "Employee salary"
|
||||
# "author_id" # => "Author"
|
||||
def humanize(lower_case_and_underscored_word)
|
||||
result = lower_case_and_underscored_word.to_s.dup
|
||||
inflections.humans.each { |(rule, replacement)| break if result.sub!(rule, replacement) }
|
||||
result.gsub!(/_id$/, "")
|
||||
result.tr!('_', ' ')
|
||||
result.gsub(/([a-z\d]*)/i) { |match|
|
||||
"#{inflections.acronyms[match] || match.downcase}"
|
||||
}.gsub(/^\w/) { $&.upcase }
|
||||
end
|
||||
|
||||
# Capitalizes all the words and replaces some characters in the string to create
|
||||
# a nicer looking title. +titleize+ is meant for creating pretty output. It is not
|
||||
# used in the Rails internals.
|
||||
#
|
||||
# +titleize+ is also aliased as +titlecase+.
|
||||
#
|
||||
# "man from the boondocks".titleize # => "Man From The Boondocks"
|
||||
# "x-men: the last stand".titleize # => "X Men: The Last Stand"
|
||||
# "TheManWithoutAPast".titleize # => "The Man Without A Past"
|
||||
# "raiders_of_the_lost_ark".titleize # => "Raiders Of The Lost Ark"
|
||||
def titleize(word)
|
||||
humanize(underscore(word)).gsub(/\b(?<!['’`])[a-z]/) { $&.capitalize }
|
||||
end
|
||||
|
||||
# Create the name of a table like Rails does for models to table names. This method
|
||||
# uses the +pluralize+ method on the last word in the string.
|
||||
#
|
||||
# "RawScaledScorer".tableize # => "raw_scaled_scorers"
|
||||
# "egg_and_ham".tableize # => "egg_and_hams"
|
||||
# "fancyCategory".tableize # => "fancy_categories"
|
||||
def tableize(class_name)
|
||||
pluralize(underscore(class_name))
|
||||
end
|
||||
|
||||
# Create a class name from a plural table name like Rails does for table names to models.
|
||||
# Note that this returns a string and not a Class. (To convert to an actual class
|
||||
# follow +classify+ with +constantize+.)
|
||||
#
|
||||
# "egg_and_hams".classify # => "EggAndHam"
|
||||
# "posts".classify # => "Post"
|
||||
#
|
||||
# Singular names are not handled correctly:
|
||||
# "business".classify # => "Busines"
|
||||
def classify(table_name)
|
||||
# strip out any leading schema name
|
||||
camelize(singularize(table_name.to_s.sub(/.*\./, '')))
|
||||
end
|
||||
|
||||
# Replaces underscores with dashes in the string.
|
||||
#
|
||||
# "puni_puni".dasherize # => "puni-puni"
|
||||
def dasherize(underscored_word)
|
||||
underscored_word.tr('_', '-')
|
||||
end
|
||||
|
||||
# Removes the module part from the expression in the string:
|
||||
#
|
||||
# "ActiveRecord::CoreExtensions::String::Inflections".demodulize # => "Inflections"
|
||||
# "Inflections".demodulize # => "Inflections"
|
||||
#
|
||||
# See also +deconstantize+.
|
||||
def demodulize(path)
|
||||
path = path.to_s
|
||||
if i = path.rindex('::')
|
||||
path[(i+2)..-1]
|
||||
else
|
||||
path
|
||||
end
|
||||
end
|
||||
|
||||
# Removes the rightmost segment from the constant expression in the string:
|
||||
#
|
||||
# "Net::HTTP".deconstantize # => "Net"
|
||||
# "::Net::HTTP".deconstantize # => "::Net"
|
||||
# "String".deconstantize # => ""
|
||||
# "::String".deconstantize # => ""
|
||||
# "".deconstantize # => ""
|
||||
#
|
||||
# See also +demodulize+.
|
||||
def deconstantize(path)
|
||||
path.to_s[0...(path.rindex('::') || 0)] # implementation based on the one in facets' Module#spacename
|
||||
end
|
||||
|
||||
# Creates a foreign key name from a class name.
|
||||
# +separate_class_name_and_id_with_underscore+ sets whether
|
||||
# the method should put '_' between the name and 'id'.
|
||||
#
|
||||
# "Message".foreign_key # => "message_id"
|
||||
# "Message".foreign_key(false) # => "messageid"
|
||||
# "Admin::Post".foreign_key # => "post_id"
|
||||
def foreign_key(class_name, separate_class_name_and_id_with_underscore = true)
|
||||
underscore(demodulize(class_name)) + (separate_class_name_and_id_with_underscore ? "_id" : "id")
|
||||
end
|
||||
|
||||
# Tries to find a constant with the name specified in the argument string:
|
||||
#
|
||||
# "Module".constantize # => Module
|
||||
# "Test::Unit".constantize # => Test::Unit
|
||||
#
|
||||
# The name is assumed to be the one of a top-level constant, no matter whether
|
||||
# it starts with "::" or not. No lexical context is taken into account:
|
||||
#
|
||||
# C = 'outside'
|
||||
# module M
|
||||
# C = 'inside'
|
||||
# C # => 'inside'
|
||||
# "C".constantize # => 'outside', same as ::C
|
||||
# end
|
||||
#
|
||||
# NameError is raised when the name is not in CamelCase or the constant is
|
||||
# unknown.
|
||||
def constantize(camel_cased_word)
|
||||
names = camel_cased_word.split('::')
|
||||
names.shift if names.empty? || names.first.empty?
|
||||
|
||||
names.inject(Object) do |constant, name|
|
||||
if constant == Object
|
||||
constant.const_get(name)
|
||||
else
|
||||
candidate = constant.const_get(name)
|
||||
next candidate if constant.const_defined?(name, false)
|
||||
next candidate unless Object.const_defined?(name)
|
||||
|
||||
# Go down the ancestors to check it it's owned
|
||||
# directly before we reach Object or the end of ancestors.
|
||||
constant = constant.ancestors.inject do |const, ancestor|
|
||||
break const if ancestor == Object
|
||||
break ancestor if ancestor.const_defined?(name, false)
|
||||
const
|
||||
end
|
||||
|
||||
# owner is in Object, so raise
|
||||
constant.const_get(name, false)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Tries to find a constant with the name specified in the argument string:
|
||||
#
|
||||
# "Module".safe_constantize # => Module
|
||||
# "Test::Unit".safe_constantize # => Test::Unit
|
||||
#
|
||||
# The name is assumed to be the one of a top-level constant, no matter whether
|
||||
# it starts with "::" or not. No lexical context is taken into account:
|
||||
#
|
||||
# C = 'outside'
|
||||
# module M
|
||||
# C = 'inside'
|
||||
# C # => 'inside'
|
||||
# "C".safe_constantize # => 'outside', same as ::C
|
||||
# end
|
||||
#
|
||||
# nil is returned when the name is not in CamelCase or the constant (or part of it) is
|
||||
# unknown.
|
||||
#
|
||||
# "blargle".safe_constantize # => nil
|
||||
# "UnknownModule".safe_constantize # => nil
|
||||
# "UnknownModule::Foo::Bar".safe_constantize # => nil
|
||||
#
|
||||
def safe_constantize(camel_cased_word)
|
||||
begin
|
||||
constantize(camel_cased_word)
|
||||
rescue NameError => e
|
||||
raise unless e.message =~ /(uninitialized constant|wrong constant name) #{const_regexp(camel_cased_word)}$/ ||
|
||||
e.name.to_s == camel_cased_word.to_s
|
||||
rescue ArgumentError => e
|
||||
raise unless e.message =~ /not missing constant #{const_regexp(camel_cased_word)}\!$/
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the suffix that should be added to a number to denote the position
|
||||
# in an ordered sequence such as 1st, 2nd, 3rd, 4th.
|
||||
#
|
||||
# ordinal(1) # => "st"
|
||||
# ordinal(2) # => "nd"
|
||||
# ordinal(1002) # => "nd"
|
||||
# ordinal(1003) # => "rd"
|
||||
# ordinal(-11) # => "th"
|
||||
# ordinal(-1021) # => "st"
|
||||
def ordinal(number)
|
||||
if (11..13).include?(number.to_i.abs % 100)
|
||||
"th"
|
||||
else
|
||||
case number.to_i.abs % 10
|
||||
when 1; "st"
|
||||
when 2; "nd"
|
||||
when 3; "rd"
|
||||
else "th"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Turns a number into an ordinal string used to denote the position in an
|
||||
# ordered sequence such as 1st, 2nd, 3rd, 4th.
|
||||
#
|
||||
# ordinalize(1) # => "1st"
|
||||
# ordinalize(2) # => "2nd"
|
||||
# ordinalize(1002) # => "1002nd"
|
||||
# ordinalize(1003) # => "1003rd"
|
||||
# ordinalize(-11) # => "-11th"
|
||||
# ordinalize(-1021) # => "-1021st"
|
||||
def ordinalize(number)
|
||||
"#{number}#{ordinal(number)}"
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Mount a regular expression that will match part by part of the constant.
|
||||
# For instance, Foo::Bar::Baz will generate Foo(::Bar(::Baz)?)?
|
||||
def const_regexp(camel_cased_word) #:nodoc:
|
||||
parts = camel_cased_word.split("::")
|
||||
last = parts.pop
|
||||
|
||||
parts.reverse.inject(last) do |acc, part|
|
||||
part.empty? ? acc : "#{part}(::#{acc})?"
|
||||
end
|
||||
end
|
||||
|
||||
# Applies inflection rules for +singularize+ and +pluralize+.
|
||||
#
|
||||
# apply_inflections("post", inflections.plurals) # => "posts"
|
||||
# apply_inflections("posts", inflections.singulars) # => "post"
|
||||
def apply_inflections(word, rules)
|
||||
result = word.to_s.dup
|
||||
|
||||
if word.empty? || inflections.uncountables.include?(result.downcase[/\b\w+\Z/])
|
||||
result
|
||||
else
|
||||
rules.each { |(rule, replacement)| break if result.sub!(rule, replacement) }
|
||||
result
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
138
test/fixtures/ruby/jekyll.rb
vendored
Normal file
138
test/fixtures/ruby/jekyll.rb
vendored
Normal file
@@ -0,0 +1,138 @@
|
||||
$:.unshift File.dirname(__FILE__) # For use/testing when no gem is installed
|
||||
|
||||
# Require all of the Ruby files in the given directory.
|
||||
#
|
||||
# path - The String relative path from here to the directory.
|
||||
#
|
||||
# Returns nothing.
|
||||
def require_all(path)
|
||||
glob = File.join(File.dirname(__FILE__), path, '*.rb')
|
||||
Dir[glob].each do |f|
|
||||
require f
|
||||
end
|
||||
end
|
||||
|
||||
# rubygems
|
||||
require 'rubygems'
|
||||
|
||||
# stdlib
|
||||
require 'fileutils'
|
||||
require 'time'
|
||||
require 'yaml'
|
||||
require 'English'
|
||||
|
||||
# 3rd party
|
||||
require 'liquid'
|
||||
require 'maruku'
|
||||
require 'albino'
|
||||
|
||||
# internal requires
|
||||
require 'jekyll/core_ext'
|
||||
require 'jekyll/site'
|
||||
require 'jekyll/convertible'
|
||||
require 'jekyll/layout'
|
||||
require 'jekyll/page'
|
||||
require 'jekyll/post'
|
||||
require 'jekyll/filters'
|
||||
require 'jekyll/static_file'
|
||||
require 'jekyll/errors'
|
||||
|
||||
# extensions
|
||||
require 'jekyll/plugin'
|
||||
require 'jekyll/converter'
|
||||
require 'jekyll/generator'
|
||||
require_all 'jekyll/converters'
|
||||
require_all 'jekyll/generators'
|
||||
require_all 'jekyll/tags'
|
||||
|
||||
module Jekyll
|
||||
VERSION = '0.11.2'
|
||||
|
||||
# Default options. Overriden by values in _config.yml or command-line opts.
|
||||
# (Strings rather symbols used for compatability with YAML).
|
||||
DEFAULTS = {
|
||||
'safe' => false,
|
||||
'auto' => false,
|
||||
'server' => false,
|
||||
'server_port' => 4000,
|
||||
|
||||
'source' => Dir.pwd,
|
||||
'destination' => File.join(Dir.pwd, '_site'),
|
||||
'plugins' => File.join(Dir.pwd, '_plugins'),
|
||||
|
||||
'future' => true,
|
||||
'lsi' => false,
|
||||
'pygments' => false,
|
||||
'markdown' => 'maruku',
|
||||
'permalink' => 'date',
|
||||
'include' => ['.htaccess'],
|
||||
'paginate_path' => 'page:num',
|
||||
|
||||
'markdown_ext' => 'markdown,mkd,mkdn,md',
|
||||
'textile_ext' => 'textile',
|
||||
|
||||
'maruku' => {
|
||||
'use_tex' => false,
|
||||
'use_divs' => false,
|
||||
'png_engine' => 'blahtex',
|
||||
'png_dir' => 'images/latex',
|
||||
'png_url' => '/images/latex'
|
||||
},
|
||||
'rdiscount' => {
|
||||
'extensions' => []
|
||||
},
|
||||
'redcarpet' => {
|
||||
'extensions' => []
|
||||
},
|
||||
'kramdown' => {
|
||||
'auto_ids' => true,
|
||||
'footnote_nr' => 1,
|
||||
'entity_output' => 'as_char',
|
||||
'toc_levels' => '1..6',
|
||||
'smart_quotes' => 'lsquo,rsquo,ldquo,rdquo',
|
||||
'use_coderay' => false,
|
||||
|
||||
'coderay' => {
|
||||
'coderay_wrap' => 'div',
|
||||
'coderay_line_numbers' => 'inline',
|
||||
'coderay_line_number_start' => 1,
|
||||
'coderay_tab_width' => 4,
|
||||
'coderay_bold_every' => 10,
|
||||
'coderay_css' => 'style'
|
||||
}
|
||||
},
|
||||
'redcloth' => {
|
||||
'hard_breaks' => true
|
||||
}
|
||||
}
|
||||
|
||||
# Public: Generate a Jekyll configuration Hash by merging the default
|
||||
# options with anything in _config.yml, and adding the given options on top.
|
||||
#
|
||||
# override - A Hash of config directives that override any options in both
|
||||
# the defaults and the config file. See Jekyll::DEFAULTS for a
|
||||
# list of option names and their defaults.
|
||||
#
|
||||
# Returns the final configuration Hash.
|
||||
def self.configuration(override)
|
||||
# _config.yml may override default source location, but until
|
||||
# then, we need to know where to look for _config.yml
|
||||
source = override['source'] || Jekyll::DEFAULTS['source']
|
||||
|
||||
# Get configuration from <source>/_config.yml
|
||||
config_file = File.join(source, '_config.yml')
|
||||
begin
|
||||
config = YAML.load_file(config_file)
|
||||
raise "Invalid configuration - #{config_file}" if !config.is_a?(Hash)
|
||||
$stdout.puts "Configuration from #{config_file}"
|
||||
rescue => err
|
||||
$stderr.puts "WARNING: Could not read configuration. " +
|
||||
"Using defaults (and options)."
|
||||
$stderr.puts "\t" + err.to_s
|
||||
config = {}
|
||||
end
|
||||
|
||||
# Merge DEFAULTS < _config.yml < override
|
||||
Jekyll::DEFAULTS.deep_merge(config).deep_merge(override)
|
||||
end
|
||||
end
|
||||
385
test/fixtures/ruby/resque.rb
vendored
Normal file
385
test/fixtures/ruby/resque.rb
vendored
Normal file
@@ -0,0 +1,385 @@
|
||||
require 'redis/namespace'
|
||||
|
||||
require 'resque/version'
|
||||
|
||||
require 'resque/errors'
|
||||
|
||||
require 'resque/failure'
|
||||
require 'resque/failure/base'
|
||||
|
||||
require 'resque/helpers'
|
||||
require 'resque/stat'
|
||||
require 'resque/job'
|
||||
require 'resque/worker'
|
||||
require 'resque/plugin'
|
||||
require 'resque/queue'
|
||||
require 'resque/multi_queue'
|
||||
require 'resque/coder'
|
||||
require 'resque/multi_json_coder'
|
||||
|
||||
module Resque
|
||||
include Helpers
|
||||
extend self
|
||||
|
||||
# Accepts:
|
||||
# 1. A 'hostname:port' String
|
||||
# 2. A 'hostname:port:db' String (to select the Redis db)
|
||||
# 3. A 'hostname:port/namespace' String (to set the Redis namespace)
|
||||
# 4. A Redis URL String 'redis://host:port'
|
||||
# 5. An instance of `Redis`, `Redis::Client`, `Redis::DistRedis`,
|
||||
# or `Redis::Namespace`.
|
||||
def redis=(server)
|
||||
case server
|
||||
when String
|
||||
if server =~ /redis\:\/\//
|
||||
redis = Redis.connect(:url => server, :thread_safe => true)
|
||||
else
|
||||
server, namespace = server.split('/', 2)
|
||||
host, port, db = server.split(':')
|
||||
redis = Redis.new(:host => host, :port => port,
|
||||
:thread_safe => true, :db => db)
|
||||
end
|
||||
namespace ||= :resque
|
||||
|
||||
@redis = Redis::Namespace.new(namespace, :redis => redis)
|
||||
when Redis::Namespace
|
||||
@redis = server
|
||||
else
|
||||
@redis = Redis::Namespace.new(:resque, :redis => server)
|
||||
end
|
||||
@queues = Hash.new { |h,name|
|
||||
h[name] = Resque::Queue.new(name, @redis, coder)
|
||||
}
|
||||
end
|
||||
|
||||
# Encapsulation of encode/decode. Overwrite this to use it across Resque.
|
||||
# This defaults to MultiJson for backwards compatibilty.
|
||||
def coder
|
||||
@coder ||= MultiJsonCoder.new
|
||||
end
|
||||
attr_writer :coder
|
||||
|
||||
# Returns the current Redis connection. If none has been created, will
|
||||
# create a new one.
|
||||
def redis
|
||||
return @redis if @redis
|
||||
self.redis = Redis.respond_to?(:connect) ? Redis.connect : "localhost:6379"
|
||||
self.redis
|
||||
end
|
||||
|
||||
def redis_id
|
||||
# support 1.x versions of redis-rb
|
||||
if redis.respond_to?(:server)
|
||||
redis.server
|
||||
elsif redis.respond_to?(:nodes) # distributed
|
||||
redis.nodes.map { |n| n.id }.join(', ')
|
||||
else
|
||||
redis.client.id
|
||||
end
|
||||
end
|
||||
|
||||
# The `before_first_fork` hook will be run in the **parent** process
|
||||
# only once, before forking to run the first job. Be careful- any
|
||||
# changes you make will be permanent for the lifespan of the
|
||||
# worker.
|
||||
#
|
||||
# Call with a block to set the hook.
|
||||
# Call with no arguments to return the hook.
|
||||
def before_first_fork(&block)
|
||||
block ? (@before_first_fork = block) : @before_first_fork
|
||||
end
|
||||
|
||||
# Set a proc that will be called in the parent process before the
|
||||
# worker forks for the first time.
|
||||
attr_writer :before_first_fork
|
||||
|
||||
# The `before_fork` hook will be run in the **parent** process
|
||||
# before every job, so be careful- any changes you make will be
|
||||
# permanent for the lifespan of the worker.
|
||||
#
|
||||
# Call with a block to set the hook.
|
||||
# Call with no arguments to return the hook.
|
||||
def before_fork(&block)
|
||||
block ? (@before_fork = block) : @before_fork
|
||||
end
|
||||
|
||||
# Set the before_fork proc.
|
||||
attr_writer :before_fork
|
||||
|
||||
# The `after_fork` hook will be run in the child process and is passed
|
||||
# the current job. Any changes you make, therefore, will only live as
|
||||
# long as the job currently being processed.
|
||||
#
|
||||
# Call with a block to set the hook.
|
||||
# Call with no arguments to return the hook.
|
||||
def after_fork(&block)
|
||||
block ? (@after_fork = block) : @after_fork
|
||||
end
|
||||
|
||||
# Set the after_fork proc.
|
||||
attr_writer :after_fork
|
||||
|
||||
def to_s
|
||||
"Resque Client connected to #{redis_id}"
|
||||
end
|
||||
|
||||
attr_accessor :inline
|
||||
|
||||
# If 'inline' is true Resque will call #perform method inline
|
||||
# without queuing it into Redis and without any Resque callbacks.
|
||||
# The 'inline' is false Resque jobs will be put in queue regularly.
|
||||
alias :inline? :inline
|
||||
|
||||
#
|
||||
# queue manipulation
|
||||
#
|
||||
|
||||
# Pushes a job onto a queue. Queue name should be a string and the
|
||||
# item should be any JSON-able Ruby object.
|
||||
#
|
||||
# Resque works generally expect the `item` to be a hash with the following
|
||||
# keys:
|
||||
#
|
||||
# class - The String name of the job to run.
|
||||
# args - An Array of arguments to pass the job. Usually passed
|
||||
# via `class.to_class.perform(*args)`.
|
||||
#
|
||||
# Example
|
||||
#
|
||||
# Resque.push('archive', :class => 'Archive', :args => [ 35, 'tar' ])
|
||||
#
|
||||
# Returns nothing
|
||||
def push(queue, item)
|
||||
queue(queue) << item
|
||||
end
|
||||
|
||||
# Pops a job off a queue. Queue name should be a string.
|
||||
#
|
||||
# Returns a Ruby object.
|
||||
def pop(queue)
|
||||
begin
|
||||
queue(queue).pop(true)
|
||||
rescue ThreadError
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
# Returns an integer representing the size of a queue.
|
||||
# Queue name should be a string.
|
||||
def size(queue)
|
||||
queue(queue).size
|
||||
end
|
||||
|
||||
# Returns an array of items currently queued. Queue name should be
|
||||
# a string.
|
||||
#
|
||||
# start and count should be integer and can be used for pagination.
|
||||
# start is the item to begin, count is how many items to return.
|
||||
#
|
||||
# To get the 3rd page of a 30 item, paginatied list one would use:
|
||||
# Resque.peek('my_list', 59, 30)
|
||||
def peek(queue, start = 0, count = 1)
|
||||
queue(queue).slice start, count
|
||||
end
|
||||
|
||||
# Does the dirty work of fetching a range of items from a Redis list
|
||||
# and converting them into Ruby objects.
|
||||
def list_range(key, start = 0, count = 1)
|
||||
if count == 1
|
||||
decode redis.lindex(key, start)
|
||||
else
|
||||
Array(redis.lrange(key, start, start+count-1)).map do |item|
|
||||
decode item
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Returns an array of all known Resque queues as strings.
|
||||
def queues
|
||||
Array(redis.smembers(:queues))
|
||||
end
|
||||
|
||||
# Given a queue name, completely deletes the queue.
|
||||
def remove_queue(queue)
|
||||
queue(queue).destroy
|
||||
@queues.delete(queue.to_s)
|
||||
end
|
||||
|
||||
# Return the Resque::Queue object for a given name
|
||||
def queue(name)
|
||||
@queues[name.to_s]
|
||||
end
|
||||
|
||||
|
||||
#
|
||||
# job shortcuts
|
||||
#
|
||||
|
||||
# This method can be used to conveniently add a job to a queue.
|
||||
# It assumes the class you're passing it is a real Ruby class (not
|
||||
# a string or reference) which either:
|
||||
#
|
||||
# a) has a @queue ivar set
|
||||
# b) responds to `queue`
|
||||
#
|
||||
# If either of those conditions are met, it will use the value obtained
|
||||
# from performing one of the above operations to determine the queue.
|
||||
#
|
||||
# If no queue can be inferred this method will raise a `Resque::NoQueueError`
|
||||
#
|
||||
# Returns true if the job was queued, nil if the job was rejected by a
|
||||
# before_enqueue hook.
|
||||
#
|
||||
# This method is considered part of the `stable` API.
|
||||
def enqueue(klass, *args)
|
||||
enqueue_to(queue_from_class(klass), klass, *args)
|
||||
end
|
||||
|
||||
# Just like `enqueue` but allows you to specify the queue you want to
|
||||
# use. Runs hooks.
|
||||
#
|
||||
# `queue` should be the String name of the queue you're targeting.
|
||||
#
|
||||
# Returns true if the job was queued, nil if the job was rejected by a
|
||||
# before_enqueue hook.
|
||||
#
|
||||
# This method is considered part of the `stable` API.
|
||||
def enqueue_to(queue, klass, *args)
|
||||
# Perform before_enqueue hooks. Don't perform enqueue if any hook returns false
|
||||
before_hooks = Plugin.before_enqueue_hooks(klass).collect do |hook|
|
||||
klass.send(hook, *args)
|
||||
end
|
||||
return nil if before_hooks.any? { |result| result == false }
|
||||
|
||||
Job.create(queue, klass, *args)
|
||||
|
||||
Plugin.after_enqueue_hooks(klass).each do |hook|
|
||||
klass.send(hook, *args)
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
# This method can be used to conveniently remove a job from a queue.
|
||||
# It assumes the class you're passing it is a real Ruby class (not
|
||||
# a string or reference) which either:
|
||||
#
|
||||
# a) has a @queue ivar set
|
||||
# b) responds to `queue`
|
||||
#
|
||||
# If either of those conditions are met, it will use the value obtained
|
||||
# from performing one of the above operations to determine the queue.
|
||||
#
|
||||
# If no queue can be inferred this method will raise a `Resque::NoQueueError`
|
||||
#
|
||||
# If no args are given, this method will dequeue *all* jobs matching
|
||||
# the provided class. See `Resque::Job.destroy` for more
|
||||
# information.
|
||||
#
|
||||
# Returns the number of jobs destroyed.
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# # Removes all jobs of class `UpdateNetworkGraph`
|
||||
# Resque.dequeue(GitHub::Jobs::UpdateNetworkGraph)
|
||||
#
|
||||
# # Removes all jobs of class `UpdateNetworkGraph` with matching args.
|
||||
# Resque.dequeue(GitHub::Jobs::UpdateNetworkGraph, 'repo:135325')
|
||||
#
|
||||
# This method is considered part of the `stable` API.
|
||||
def dequeue(klass, *args)
|
||||
# Perform before_dequeue hooks. Don't perform dequeue if any hook returns false
|
||||
before_hooks = Plugin.before_dequeue_hooks(klass).collect do |hook|
|
||||
klass.send(hook, *args)
|
||||
end
|
||||
return if before_hooks.any? { |result| result == false }
|
||||
|
||||
Job.destroy(queue_from_class(klass), klass, *args)
|
||||
|
||||
Plugin.after_dequeue_hooks(klass).each do |hook|
|
||||
klass.send(hook, *args)
|
||||
end
|
||||
end
|
||||
|
||||
# Given a class, try to extrapolate an appropriate queue based on a
|
||||
# class instance variable or `queue` method.
|
||||
def queue_from_class(klass)
|
||||
klass.instance_variable_get(:@queue) ||
|
||||
(klass.respond_to?(:queue) and klass.queue)
|
||||
end
|
||||
|
||||
# This method will return a `Resque::Job` object or a non-true value
|
||||
# depending on whether a job can be obtained. You should pass it the
|
||||
# precise name of a queue: case matters.
|
||||
#
|
||||
# This method is considered part of the `stable` API.
|
||||
def reserve(queue)
|
||||
Job.reserve(queue)
|
||||
end
|
||||
|
||||
# Validates if the given klass could be a valid Resque job
|
||||
#
|
||||
# If no queue can be inferred this method will raise a `Resque::NoQueueError`
|
||||
#
|
||||
# If given klass is nil this method will raise a `Resque::NoClassError`
|
||||
def validate(klass, queue = nil)
|
||||
queue ||= queue_from_class(klass)
|
||||
|
||||
if !queue
|
||||
raise NoQueueError.new("Jobs must be placed onto a queue.")
|
||||
end
|
||||
|
||||
if klass.to_s.empty?
|
||||
raise NoClassError.new("Jobs must be given a class.")
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
#
|
||||
# worker shortcuts
|
||||
#
|
||||
|
||||
# A shortcut to Worker.all
|
||||
def workers
|
||||
Worker.all
|
||||
end
|
||||
|
||||
# A shortcut to Worker.working
|
||||
def working
|
||||
Worker.working
|
||||
end
|
||||
|
||||
# A shortcut to unregister_worker
|
||||
# useful for command line tool
|
||||
def remove_worker(worker_id)
|
||||
worker = Resque::Worker.find(worker_id)
|
||||
worker.unregister_worker
|
||||
end
|
||||
|
||||
#
|
||||
# stats
|
||||
#
|
||||
|
||||
# Returns a hash, similar to redis-rb's #info, of interesting stats.
|
||||
def info
|
||||
return {
|
||||
:pending => queues.inject(0) { |m,k| m + size(k) },
|
||||
:processed => Stat[:processed],
|
||||
:queues => queues.size,
|
||||
:workers => workers.size.to_i,
|
||||
:working => working.size,
|
||||
:failed => Stat[:failed],
|
||||
:servers => [redis_id],
|
||||
:environment => ENV['RAILS_ENV'] || ENV['RACK_ENV'] || 'development'
|
||||
}
|
||||
end
|
||||
|
||||
# Returns an array of all known Resque keys in Redis. Redis' KEYS operation
|
||||
# is O(N) for the keyspace, so be careful - this can be slow for big databases.
|
||||
def keys
|
||||
redis.keys("*").map do |key|
|
||||
key.sub("#{redis.namespace}:", '')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
1775
test/fixtures/ruby/sinatra.rb
vendored
Normal file
1775
test/fixtures/ruby/sinatra.rb
vendored
Normal file
File diff suppressed because it is too large
Load Diff
32
test/fixtures/shell/rbenv-sh-shell
vendored
Executable file
32
test/fixtures/shell/rbenv-sh-shell
vendored
Executable file
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
[ -n "$RBENV_DEBUG" ] && set -x
|
||||
|
||||
# Provide rbenv completions
|
||||
if [ "$1" = "--complete" ]; then
|
||||
echo --unset
|
||||
echo system
|
||||
exec rbenv-versions --bare
|
||||
fi
|
||||
|
||||
version="$1"
|
||||
|
||||
if [ -z "$version" ]; then
|
||||
if [ -z "$RBENV_VERSION" ]; then
|
||||
echo "rbenv: no shell-specific version configured" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "echo \"\$RBENV_VERSION\""
|
||||
exit
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$version" = "--unset" ]; then
|
||||
echo "unset RBENV_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Make sure the specified version is installed.
|
||||
rbenv-prefix "$version" >/dev/null
|
||||
|
||||
echo "export RBENV_VERSION=\"${version}\""
|
||||
57
test/fixtures/shell/rvm
vendored
Executable file
57
test/fixtures/shell/rvm
vendored
Executable file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
if (( ${rvm_ignore_rvmrc:=0} == 0 ))
|
||||
then
|
||||
declare rvmrc
|
||||
|
||||
rvm_rvmrc_files=("/etc/rvmrc" "$HOME/.rvmrc")
|
||||
if [[ -n "${rvm_prefix:-}" ]] && ! [[ "$HOME/.rvmrc" -ef "${rvm_prefix}/.rvmrc" ]]
|
||||
then rvm_rvmrc_files+=( "${rvm_prefix}/.rvmrc" )
|
||||
fi
|
||||
|
||||
for rvmrc in "${rvm_rvmrc_files[@]}"
|
||||
do
|
||||
if [[ -f "$rvmrc" ]]
|
||||
then
|
||||
if GREP_OPTIONS="" \grep '^\s*rvm .*$' "$rvmrc" >/dev/null 2>&1
|
||||
then
|
||||
printf "%b" "
|
||||
Error:
|
||||
$rvmrc is for rvm settings only.
|
||||
rvm CLI may NOT be called from within $rvmrc.
|
||||
Skipping the loading of $rvmrc
|
||||
"
|
||||
exit 1
|
||||
else
|
||||
source "$rvmrc"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
unset rvm_rvmrc_files
|
||||
unset rvmrc
|
||||
fi
|
||||
|
||||
export rvm_path
|
||||
if [[ -z "${rvm_path:-}" ]]
|
||||
then
|
||||
if (( UID == 0 )) && [[ -d "/usr/local/rvm" ]]
|
||||
then rvm_path="/usr/local/rvm"
|
||||
elif [[ -d "${HOME}/.rvm" ]]
|
||||
then rvm_path="${HOME}/.rvm"
|
||||
elif [[ -d "/usr/local/rvm" ]]
|
||||
then rvm_path="/usr/local/rvm"
|
||||
else echo "Can't find rvm install!" 1>&2 ; exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# allow disabling check temporary
|
||||
: rvm_is_not_a_shell_function:${rvm_is_not_a_shell_function:=1}
|
||||
|
||||
# if to prevent fork-bomb
|
||||
if source "${rvm_scripts_path:="$rvm_path/scripts"}/rvm"
|
||||
then
|
||||
rvm "$@"
|
||||
else
|
||||
echo "Error sourcing RVM!" 1>&2
|
||||
exit 1
|
||||
fi
|
||||
@@ -14,12 +14,12 @@ class TestRepository < Test::Unit::TestCase
|
||||
end
|
||||
|
||||
def test_linguist_language
|
||||
assert_equal Language['JavaScript'], linguist_repo.language
|
||||
# assert_equal Language['Ruby'], linguist_repo.language
|
||||
end
|
||||
|
||||
def test_linguist_languages
|
||||
assert linguist_repo.languages[Language['Ruby']] > 30_000
|
||||
assert linguist_repo.languages[Language['Python']] < 1000
|
||||
assert linguist_repo.languages[Language['Python']] > 1000
|
||||
end
|
||||
|
||||
def test_linguist_size
|
||||
|
||||
Reference in New Issue
Block a user