Make more symbols static.
parent e1f6646d68
commit 111c180674
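This is a pure linkage cleanup: symbols that are only used inside their own translation unit gain the "static" storage-class specifier, so they are no longer exported from the object file and cannot clash with names defined elsewhere in the interpreter. Judging by the hunk contexts, the files touched appear to be the deque (_collections), locale, Tkinter, array, and unicodedata C sources, plus the script that generates the unicodedata tables. The sketch below is not part of the patch; it uses made-up names purely to illustrate what the specifier changes.

/* Illustrative sketch only (hypothetical names, not from this patch). */

static int counter = 0;        /* internal linkage: invisible to other .c files  */

static void helper(void)       /* internal linkage: callable only from this file */
{
    counter++;
}

int module_entry(void)         /* external linkage: the one symbol other
                                  translation units can still link against */
{
    helper();
    return counter;
}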
@@ -85,7 +85,7 @@ newblock(block *leftlink, block *rightlink, int len) {
     return b;
 }
 
-void
+static void
 freeblock(block *b)
 {
     if (numfreeblocks < MAXFREEBLOCKS) {
@@ -957,7 +957,7 @@ typedef struct {
     int counter;    /* number of items remaining for iteration */
 } dequeiterobject;
 
-PyTypeObject dequeiter_type;
+static PyTypeObject dequeiter_type;
 
 static PyObject *
 deque_iter(dequeobject *deque)
@@ -1024,7 +1024,7 @@ static PyMethodDef dequeiter_methods[] = {
     {NULL,      NULL}       /* sentinel */
 };
 
-PyTypeObject dequeiter_type = {
+static PyTypeObject dequeiter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "deque_iterator",                   /* tp_name */
     sizeof(dequeiterobject),            /* tp_basicsize */
@@ -1059,7 +1059,7 @@ PyTypeObject dequeiter_type = {
 
 /*********************** Deque Reverse Iterator **************************/
 
-PyTypeObject dequereviter_type;
+static PyTypeObject dequereviter_type;
 
 static PyObject *
 deque_reviter(dequeobject *deque)
@@ -1106,7 +1106,7 @@ dequereviter_next(dequeiterobject *it)
     return item;
 }
 
-PyTypeObject dequereviter_type = {
+static PyTypeObject dequereviter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "deque_reverse_iterator",           /* tp_name */
     sizeof(dequeiterobject),            /* tp_basicsize */
@@ -444,7 +444,7 @@ PyLocale_getdefaultlocale(PyObject* self)
 
 #ifdef HAVE_LANGINFO_H
 #define LANGINFO(X) {#X, X}
-struct langinfo_constant{
+static struct langinfo_constant{
     char* name;
     int value;
 } langinfo_constants[] =
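Several of these hunks (langinfo_constants above, and the nfc_first/nfc_last tables further down) add static to a declaration that defines a struct type and an array of that type in a single statement. A storage-class specifier in such a declaration applies to the declared object, not to the type, so only the array's linkage changes; the struct tag behaves as before. A minimal sketch with hypothetical names:

/* Sketch only; color_constant/color_constants are made-up names. */
static struct color_constant {     /* "static" binds to the object below, not to the type */
    char *name;
    int value;
} color_constants[] = {            /* this array gets internal linkage */
    {"red",   0xff0000},
    {"green", 0x00ff00},
    {NULL,    0}
};

/* The tag remains usable; only color_constants is hidden from other files. */
static struct color_constant one_more = {"blue", 0x0000ff};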
@@ -490,7 +490,7 @@ Split(char *list)
    lists. SplitObj walks through a nested tuple, finding string objects that
    need to be split. */
 
-PyObject *
+static PyObject *
 SplitObj(PyObject *arg)
 {
     if (PyTuple_Check(arg)) {
@@ -1523,7 +1523,7 @@ varname_converter(PyObject *in, void *_out)
     return 0;
 }
 
-void
+static void
 var_perform(VarEvent *ev)
 {
     *(ev->res) = ev->func(ev->self, ev->args, ev->flags);
@@ -1546,7 +1546,7 @@ static PyGetSetDef array_getsets [] = {
     {NULL}
 };
 
-PyMethodDef array_methods[] = {
+static PyMethodDef array_methods[] = {
     {"append",      (PyCFunction)array_append,      METH_O,
      append_doc},
     {"buffer_info", (PyCFunction)array_buffer_info, METH_NOARGS,
@@ -228,7 +228,7 @@ const _PyUnicode_DatabaseRecord _PyUnicode_Database_Records[] = {
 #define TOTAL_FIRST 356
 #define TOTAL_LAST 53
 struct reindex{int start;short count,index;};
-struct reindex nfc_first[] = {
+static struct reindex nfc_first[] = {
   { 60, 2, 0},
   { 65, 15, 3},
   { 82, 8, 19},
@@ -425,7 +425,7 @@ struct reindex nfc_first[] = {
   {0,0,0}
 };
 
-struct reindex nfc_last[] = {
+static struct reindex nfc_last[] = {
   { 768, 4, 0},
   { 774, 6, 5},
   { 783, 0, 12},
@@ -229,12 +229,12 @@ def makeunicodedata(unicode, trace):
     print >>fp, "#define TOTAL_FIRST",total_first
     print >>fp, "#define TOTAL_LAST",total_last
     print >>fp, "struct reindex{int start;short count,index;};"
-    print >>fp, "struct reindex nfc_first[] = {"
+    print >>fp, "static struct reindex nfc_first[] = {"
     for start,end in comp_first_ranges:
         print >>fp,"  { %d, %d, %d}," % (start,end-start,comp_first[start])
     print >>fp,"  {0,0,0}"
     print >>fp,"};\n"
-    print >>fp, "struct reindex nfc_last[] = {"
+    print >>fp, "static struct reindex nfc_last[] = {"
     for start,end in comp_last_ranges:
         print >>fp,"  { %d, %d, %d}," % (start,end-start,comp_last[start])
     print >>fp,"  {0,0,0}"